semantic-link-labs 0.11.1 → 0.11.2 (py3-none-any wheel)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/METADATA +5 -4
- semantic_link_labs-0.11.2.dist-info/RECORD +210 -0
- sempy_labs/__init__.py +56 -56
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_ai.py +1 -1
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +5 -5
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +2 -2
- sempy_labs/_dashboards.py +16 -16
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +101 -26
- sempy_labs/_dax.py +3 -3
- sempy_labs/_dax_query_view.py +1 -1
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +22 -21
- sempy_labs/_eventhouses.py +12 -11
- sempy_labs/_eventstreams.py +12 -11
- sempy_labs/_external_data_shares.py +23 -22
- sempy_labs/_gateways.py +47 -45
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +12 -11
- sempy_labs/_job_scheduler.py +56 -54
- sempy_labs/_kql_databases.py +16 -17
- sempy_labs/_kql_querysets.py +12 -11
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_list_functions.py +1 -1
- sempy_labs/_managed_private_endpoints.py +18 -15
- sempy_labs/_mirrored_databases.py +16 -15
- sempy_labs/_mirrored_warehouses.py +12 -11
- sempy_labs/_ml_experiments.py +11 -10
- sempy_labs/_ml_models.py +11 -10
- sempy_labs/_model_auto_build.py +3 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +12 -12
- sempy_labs/_notebooks.py +1 -1
- sempy_labs/_one_lake_integration.py +1 -1
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +30 -28
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +1 -1
- sempy_labs/_sql_endpoints.py +12 -11
- sempy_labs/_sqldatabase.py +15 -15
- sempy_labs/_tags.py +11 -10
- sempy_labs/_translations.py +1 -1
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_variable_libraries.py +13 -12
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +15 -14
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +14 -13
- sempy_labs/admin/__init__.py +18 -18
- sempy_labs/admin/_activities.py +46 -46
- sempy_labs/admin/_apps.py +28 -26
- sempy_labs/admin/_artifacts.py +15 -15
- sempy_labs/admin/_basic_functions.py +1 -2
- sempy_labs/admin/_capacities.py +84 -82
- sempy_labs/admin/_dataflows.py +2 -2
- sempy_labs/admin/_datasets.py +50 -48
- sempy_labs/admin/_domains.py +25 -19
- sempy_labs/admin/_external_data_share.py +24 -22
- sempy_labs/admin/_git.py +17 -17
- sempy_labs/admin/_items.py +47 -45
- sempy_labs/admin/_reports.py +61 -58
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +18 -18
- sempy_labs/admin/_tags.py +2 -2
- sempy_labs/admin/_tenant.py +57 -51
- sempy_labs/admin/_users.py +16 -15
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/directlake/__init__.py +12 -12
- sempy_labs/directlake/_directlake_schema_compare.py +3 -3
- sempy_labs/directlake/_directlake_schema_sync.py +9 -7
- sempy_labs/directlake/_dl_helper.py +1 -1
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
- sempy_labs/directlake/_warm_cache.py +3 -3
- sempy_labs/graph/__init__.py +3 -3
- sempy_labs/graph/_groups.py +81 -78
- sempy_labs/graph/_teams.py +21 -21
- sempy_labs/graph/_users.py +111 -10
- sempy_labs/lakehouse/__init__.py +7 -7
- sempy_labs/lakehouse/_blobs.py +30 -30
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
- sempy_labs/lakehouse/_helper.py +30 -2
- sempy_labs/lakehouse/_lakehouse.py +2 -2
- sempy_labs/lakehouse/_livy_sessions.py +47 -42
- sempy_labs/lakehouse/_shortcuts.py +22 -21
- sempy_labs/migration/__init__.py +8 -8
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -3
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +3 -4
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -2
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +4 -4
- sempy_labs/migration/_migration_validation.py +1 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
- sempy_labs/report/__init__.py +10 -10
- sempy_labs/report/_download_report.py +2 -2
- sempy_labs/report/_export_report.py +2 -2
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_paginated.py +1 -1
- sempy_labs/report/_report_bpa.py +4 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_list_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +247 -249
- sempy_labs/report/_save_report.py +3 -3
- sempy_labs/theme/_org_themes.py +19 -6
- sempy_labs/tom/__init__.py +1 -1
- sempy_labs/tom/_model.py +5 -5
- semantic_link_labs-0.11.1.dist-info/RECORD +0 -210
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.2.dist-info}/top_level.txt +0 -0
sempy_labs/lakehouse/_get_lakehouse_tables.py
CHANGED

@@ -2,7 +2,7 @@ import os
 import pandas as pd
 import pyarrow.parquet as pq
 from datetime import datetime
-from sempy_labs._helper_functions import (
+from .._helper_functions import (
     _get_column_aggregate,
     resolve_workspace_name_and_id,
     resolve_lakehouse_name_and_id,

@@ -15,11 +15,11 @@ from sempy_labs._helper_functions import (
     create_abfss_path,
     _pure_python_notebook,
 )
-from …
+from ..directlake._guardrails import (
     get_sku_size,
     get_directlake_guardrails_for_sku,
 )
-from …
+from ._lakehouse import lakehouse_attached
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy._utils._log import log
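Aside: the recurring import change in this release swaps absolute sempy_labs.* imports for package-relative ones. Both spellings resolve to the same module object; the relative form simply stops hard-coding the distribution's top-level package name. A minimal, runnable sketch of that equivalence (the throwaway "pkg" package and the resolve_workspace_id stub below are illustrative, not taken from semantic-link-labs):

    import importlib
    import os
    import sys
    import tempfile

    # Build a tiny throwaway package: pkg/_helpers.py and pkg/lakehouse/tables.py.
    root = tempfile.mkdtemp()
    pkg = os.path.join(root, "pkg")
    os.makedirs(os.path.join(pkg, "lakehouse"))
    open(os.path.join(pkg, "__init__.py"), "w").close()
    open(os.path.join(pkg, "lakehouse", "__init__.py"), "w").close()
    with open(os.path.join(pkg, "_helpers.py"), "w") as f:
        f.write("def resolve_workspace_id():\n    return 'demo'\n")
    with open(os.path.join(pkg, "lakehouse", "tables.py"), "w") as f:
        # The relative form ("..") names the enclosing package, so it imports
        # the same module as the absolute "pkg._helpers" spelling.
        f.write(
            "from pkg._helpers import resolve_workspace_id as absolute\n"
            "from .._helpers import resolve_workspace_id as relative\n"
        )

    sys.path.insert(0, root)
    mod = importlib.import_module("pkg.lakehouse.tables")
    print(mod.absolute is mod.relative)  # True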
@@ -97,7 +97,7 @@ def get_lakehouse_tables(
     except Exception as e:
         API_called = False

-    …
+    rows = []
     local_path = None
     if API_called:
         if not responses[0].get("data"):

@@ -105,16 +105,17 @@

         for r in responses:
             for i in r.get("data", []):
-                … (10 removed lines not shown)
+                rows.append(
+                    {
+                        "Workspace Name": workspace_name,
+                        "Lakehouse Name": lakehouse_name,
+                        "Schema Name": "",
+                        "Table Name": i.get("name"),
+                        "Format": i.get("format"),
+                        "Type": i.get("type"),
+                        "Location": i.get("location"),
+                    }
+                )
     else:
         local_path = _mount(lakehouse=lakehouse_id, workspace=workspace_id)
         tables_path = os.path.join(local_path, "Tables")

@@ -127,19 +128,20 @@ def get_lakehouse_tables(
                 location_path = create_abfss_path(
                     lakehouse_id, workspace_id, table_name, schema_name
                 )
-                … (13 removed lines not shown)
+                rows.append(
+                    {
+                        "Workspace Name": workspace_name,
+                        "Lakehouse Name": lakehouse_name,
+                        "Schema Name": schema_name,
+                        "Table Name": table_name,
+                        "Format": "delta",
+                        "Type": "Managed",
+                        "Location": location_path,
+                    }
+                )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     if extended:
         sku_value = get_sku_size(workspace_id)
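The row-building changes above (and the analogous ones in list_livy_sessions, list_shortcuts, and the discover_* functions later in this diff) all follow the same pattern: collect each record as a plain dict in a "rows" list and build the DataFrame once at the end, rather than growing the frame inside the loop. A self-contained sketch of the pattern, with an illustrative columns mapping and field names rather than the package's own:

    import pandas as pd

    # Desired output schema; the keys double as column names.
    columns = {"Table Name": "string", "Format": "string"}

    def build_frame(api_items):
        # Empty frame with the expected columns, returned when nothing comes back.
        df = pd.DataFrame(columns=list(columns.keys()))
        rows = []
        for item in api_items:
            rows.append(
                {
                    "Table Name": item.get("name"),
                    "Format": item.get("format"),
                }
            )
        if rows:
            # One DataFrame construction instead of a concat per row.
            df = pd.DataFrame(rows, columns=list(columns.keys()))
        return df

    print(build_frame([{"name": "sales", "format": "delta"}]))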
sempy_labs/lakehouse/_helper.py
CHANGED

@@ -1,7 +1,7 @@
 from uuid import UUID
 from typing import Optional, Literal
 import pyarrow.dataset as ds
-from sempy_labs._helper_functions import (
+from .._helper_functions import (
     _mount,
     delete_item,
     _base_api,

@@ -11,6 +11,7 @@ from sempy_labs._helper_functions import (
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 import os
+import json


 @log

@@ -51,7 +52,34 @@ def is_v_ordered(
     )
     ds_schema = ds.dataset(table_path).schema.metadata

-    …
+    if ds_schema:
+        return any(b"vorder" in key for key in ds_schema.keys())
+
+    delta_log_path = os.path.join(table_path, "_delta_log")
+
+    def read_vorder_tag(delta_log_path):
+        json_files = sorted(
+            [f for f in os.listdir(delta_log_path) if f.endswith(".json")], reverse=True
+        )
+
+        if not json_files:
+            return False
+
+        latest_file = os.path.join(delta_log_path, json_files[0])
+
+        with open(latest_file, "r") as f:
+            for line in f:
+                try:
+                    data = json.loads(line)
+                    if "commitInfo" in data:
+                        tags = data["commitInfo"].get("tags", {})
+                        return tags.get("VORDER", "false").lower() == "true"
+                except json.JSONDecodeError:
+                    continue  # Skip malformed lines
+
+        return False  # Default if not found
+
+    return read_vorder_tag(delta_log_path)


 @log
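For context on the new _delta_log fallback in is_v_ordered above: Delta commit files are newline-delimited JSON, and the added code reports V-Order when the latest commit's commitInfo.tags carries VORDER set to "true". A small illustration of that check against an invented commit line (the values are made up; only the shape matches what the code parses):

    import json

    # An invented _delta_log commit entry; the fallback scans lines like this.
    commit_line = json.dumps(
        {"commitInfo": {"operation": "WRITE", "tags": {"VORDER": "true"}}}
    )

    entry = json.loads(commit_line)
    tags = entry.get("commitInfo", {}).get("tags", {})
    print(tags.get("VORDER", "false").lower() == "true")  # True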
sempy_labs/lakehouse/_lakehouse.py
CHANGED

@@ -2,7 +2,7 @@ from tqdm.auto import tqdm
 from typing import List, Optional, Union
 from sempy._utils._log import log
 from uuid import UUID
-from …
+from .._helper_functions import (
     _base_api,
     resolve_lakehouse_name_and_id,
     resolve_workspace_name_and_id,

@@ -13,7 +13,7 @@ import sempy_labs._icons as icons
 import re
 import time
 import pandas as pd
-from …
+from .._job_scheduler import (
     _get_item_job_instance,
 )

sempy_labs/lakehouse/_livy_sessions.py
CHANGED

@@ -1,4 +1,4 @@
-from …
+from .._helper_functions import (
     resolve_workspace_id,
     resolve_lakehouse_id,
     _base_api,

@@ -86,53 +86,58 @@ def list_livy_sessions(
         client="fabric_sp",
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             queued_duration = v.get("queuedDuration", {})
             running_duration = v.get("runningDuration", {})
             total_duration = v.get("totalDuration", {})
-            … (38 removed lines not shown)
+            rows.append(
+                {
+                    "Spark Application Id": v.get("sparkApplicationId"),
+                    "State:": v.get("state"),
+                    "Livy Id": v.get("livyId"),
+                    "Origin": v.get("origin"),
+                    "Attempt Number": v.get("attemptNumber"),
+                    "Max Number Of Attempts": v.get("maxNumberOfAttempts"),
+                    "Livy Name": v.get("livyName"),
+                    "Submitter Id": v["submitter"].get("id"),
+                    "Submitter Type": v["submitter"].get("type"),
+                    "Item Workspace Id": v["item"].get("workspaceId"),
+                    "Item Id": v["item"].get("itemId"),
+                    "Item Reference Type": v["item"].get("referenceType"),
+                    "Item Name": v.get("itemName"),
+                    "Item Type": v.get("itemType"),
+                    "Job Type": v.get("jobType"),
+                    "Submitted Date Time": v.get("submittedDateTime"),
+                    "Start Date Time": v.get("startDateTime"),
+                    "End Date Time": v.get("endDateTime"),
+                    "Queued Duration Value": queued_duration.get("value"),
+                    "Queued Duration Time Unit": queued_duration.get("timeUnit"),
+                    "Running Duration Value": running_duration.get("value"),
+                    "Running Duration Time Unit": running_duration.get("timeUnit"),
+                    "Total Duration Value": total_duration.get("value"),
+                    "Total Duration Time Unit": total_duration.get("timeUnit"),
+                    "Job Instance Id": v.get("jobInstanceId"),
+                    "Creator Item Workspace Id": v["creatorItem"].get("workspaceId"),
+                    "Creator Item Id": v["creatorItem"].get("itemId"),
+                    "Creator Item Reference Type": v["creatorItem"].get(
+                        "referenceType"
+                    ),
+                    "Creator Item Name": v.get("creatorItemName"),
+                    "Creator Item Type": v.get("creatorItemType"),
+                    "Cancellation Reason": v.get("cancellationReason"),
+                    "Capacity Id": v.get("capacityId"),
+                    "Operation Name": v.get("operationName"),
+                    "Runtime Version": v.get("runtimeVersion"),
+                    "Livy Session Item Resource Uri": v.get(
+                        "livySessionItemResourceUri"
+                    ),
+                }
+            )

-    if …
-    df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
sempy_labs/lakehouse/_shortcuts.py
CHANGED

@@ -1,6 +1,6 @@
 import sempy.fabric as fabric
 import pandas as pd
-from …
+from .._helper_functions import (
     resolve_lakehouse_name_and_id,
     resolve_workspace_name_and_id,
     _base_api,

@@ -371,7 +371,7 @@ def list_shortcuts(
         "S3Compatible": "s3Compatible",
     }

-    …
+    rows = []
     for r in responses:
         for i in r.get("value", []):
             tgt = i.get("target", {})

@@ -402,25 +402,26 @@
                 source_item_type = dfI_filt["Type"].iloc[0]
                 source_item_name = dfI_filt["Display Name"].iloc[0]

-            … (17 removed lines not shown)
+            rows.append(
+                {
+                    "Shortcut Name": i.get("name"),
+                    "Shortcut Path": i.get("path"),
+                    "Source Type": tgt_type,
+                    "Source Workspace Id": source_workspace_id,
+                    "Source Workspace Name": source_workspace_name,
+                    "Source Item Id": source_item_id,
+                    "Source Item Name": source_item_name,
+                    "Source Item Type": source_item_type,
+                    "OneLake Path": tgt.get(sources.get("oneLake"), {}).get("path"),
+                    "Connection Id": connection_id,
+                    "Location": location,
+                    "Bucket": bucket,
+                    "SubPath": sub_path,
+                    "Source Properties Raw": str(tgt),
+                }
+            )

-    if …
-    df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
sempy_labs/migration/__init__.py
CHANGED

@@ -1,24 +1,24 @@
-from …
-from …
+from ._create_pqt_file import create_pqt_file
+from ._migrate_calctables_to_lakehouse import (
     migrate_calc_tables_to_lakehouse,
     migrate_field_parameters,
 )
-from …
+from ._migrate_calctables_to_semantic_model import (
     migrate_calc_tables_to_semantic_model,
 )
-from …
+from ._migrate_model_objects_to_semantic_model import (
     migrate_model_objects_to_semantic_model,
 )
-from …
+from ._migrate_tables_columns_to_semantic_model import (
     migrate_tables_columns_to_semantic_model,
 )
-from …
+from ._migration_validation import (
     migration_validation,
 )
-from …
+from ._refresh_calc_tables import (
     refresh_calc_tables,
 )
-from …
+from ._direct_lake_to_import import (
     migrate_direct_lake_to_import,
 )

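Because __init__.py still re-exports the same names, the switch to relative imports is internal only; caller-side imports are unchanged. For example, this kind of usage keeps working exactly as before (a usage sketch using functions named in the hunk above):

    from sempy_labs.migration import create_pqt_file, migration_validation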
sempy_labs/migration/_create_pqt_file.py
CHANGED

@@ -2,12 +2,12 @@ import sempy
 import json
 import os
 import shutil
-from …
+from ..lakehouse._lakehouse import lakehouse_attached
 from sempy._utils._log import log
 from typing import Optional
 import sempy_labs._icons as icons
 from uuid import UUID
-from …
+from .._helper_functions import (
     resolve_dataset_name_and_id,
     resolve_workspace_name_and_id,
 )
sempy_labs/migration/_migrate_calctables_to_lakehouse.py
CHANGED

@@ -2,15 +2,15 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 import re
-from …
-from …
+from ..lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from .._helper_functions import (
     resolve_lakehouse_name,
     resolve_lakehouse_id,
     retry,
     generate_guid,
     save_as_delta_table,
 )
-from …
+from ..tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/migration/_migrate_calctables_to_semantic_model.py
CHANGED

@@ -1,13 +1,12 @@
 import sempy.fabric as fabric
 import re
-import …
-from …
-from sempy_labs._helper_functions import (
+from ..lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from .._helper_functions import (
     resolve_lakehouse_name,
     format_dax_object_name,
     retry,
 )
-from …
+from ..tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py
CHANGED

@@ -1,12 +1,12 @@
 import sempy
 import sempy.fabric as fabric
 import re
-from …
+from .._helper_functions import (
     create_relationship_name,
     retry,
     format_dax_object_name,
 )
-from …
+from ..tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py
CHANGED

@@ -1,9 +1,9 @@
 import sempy.fabric as fabric
 import pandas as pd
-from …
-from …
-from …
-from …
+from ..directlake._generate_shared_expression import generate_shared_expression
+from .._helper_functions import resolve_lakehouse_name, retry
+from ..lakehouse._lakehouse import lakehouse_attached
+from ..tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
sempy_labs/migration/_migration_validation.py
CHANGED

@@ -1,7 +1,6 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
-from …
+from .._list_functions import list_semantic_model_objects
 from sempy._utils._log import log
 import sempy_labs._icons as icons

sempy_labs/migration/_refresh_calc_tables.py
CHANGED

@@ -1,12 +1,12 @@
 import sempy.fabric as fabric
 import pandas as pd
 import re
-from …
+from ..tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from uuid import UUID
-from …
+from .._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     save_as_delta_table,
sempy_labs/mirrored_azure_databricks_catalog/__init__.py
CHANGED

@@ -1,7 +1,7 @@
-from …
+from ._refresh_catalog_metadata import (
     refresh_catalog_metadata,
 )
-from …
+from ._discover import (
     discover_catalogs,
     discover_schemas,
     discover_tables,
sempy_labs/mirrored_azure_databricks_catalog/_discover.py
CHANGED

@@ -1,6 +1,6 @@
 from uuid import UUID
 from typing import Optional
-from …
+from .._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,

@@ -54,20 +54,20 @@ def discover_catalogs(

     df = _create_dataframe(columns=columns)

-    …
+    rows = []
     for r in responses:
         for i in r.get("value", []):
-            … (9 removed lines not shown)
-    if …
-    df = pd.…
+            rows.append(
+                {
+                    "Catalog Name": i.get("name"),
+                    "Catalog Full Name": i.get("fullName"),
+                    "Catalog Type": i.get("catalogType"),
+                    "Storage Location": i.get("storageLocation"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

@@ -120,20 +120,20 @@ def discover_schemas(

     df = _create_dataframe(columns=columns)

-    …
+    rows = []
     for r in responses:
         for i in r.get("value", []):
-            … (9 removed lines not shown)
-    if …
-    df = pd.…
+            rows.append(
+                {
+                    "Catalog Name": catalog,
+                    "Schema Name": i.get("name"),
+                    "Schema Full Name": i.get("fullName"),
+                    "Storage Location": i.get("storageLocation"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

@@ -192,22 +192,22 @@ def discover_tables(

     df = _create_dataframe(columns=columns)

-    …
+    rows = []
     for r in responses:
         for i in r.get("value", []):
-            … (12 removed lines not shown)
-    if …
-    df = pd.…
+            rows.append(
+                {
+                    "Catalog Name": catalog,
+                    "Schema Name": schema,
+                    "Table Name": i.get("name"),
+                    "Table Full Name": i.get("fullName"),
+                    "Storage Location": i.get("storageLocation"),
+                    "Table Type": i.get("tableType"),
+                    "Data Source Format": i.get("dataSourceFormat"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df