semantic-link-labs 0.11.1__py3-none-any.whl → 0.11.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/METADATA +7 -6
- semantic_link_labs-0.11.3.dist-info/RECORD +212 -0
- sempy_labs/__init__.py +65 -71
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_ai.py +1 -1
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +5 -5
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +2 -2
- sempy_labs/_dashboards.py +16 -16
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +101 -26
- sempy_labs/_dax.py +3 -3
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +22 -21
- sempy_labs/_eventhouses.py +12 -11
- sempy_labs/_eventstreams.py +12 -11
- sempy_labs/_external_data_shares.py +78 -23
- sempy_labs/_gateways.py +47 -45
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +12 -11
- sempy_labs/_helper_functions.py +169 -5
- sempy_labs/_job_scheduler.py +56 -54
- sempy_labs/_kql_databases.py +16 -17
- sempy_labs/_kql_querysets.py +12 -11
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_labels.py +126 -0
- sempy_labs/_list_functions.py +2 -2
- sempy_labs/_managed_private_endpoints.py +18 -15
- sempy_labs/_mirrored_databases.py +16 -15
- sempy_labs/_mirrored_warehouses.py +12 -11
- sempy_labs/_ml_experiments.py +11 -10
- sempy_labs/_model_auto_build.py +3 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +12 -12
- sempy_labs/_notebooks.py +151 -2
- sempy_labs/_one_lake_integration.py +1 -1
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +30 -28
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +1 -1
- sempy_labs/_sql_endpoints.py +12 -11
- sempy_labs/_sqldatabase.py +15 -15
- sempy_labs/_tags.py +11 -10
- sempy_labs/_translations.py +1 -1
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +15 -14
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +14 -13
- sempy_labs/admin/__init__.py +18 -18
- sempy_labs/admin/_activities.py +46 -46
- sempy_labs/admin/_apps.py +28 -26
- sempy_labs/admin/_artifacts.py +15 -15
- sempy_labs/admin/_basic_functions.py +1 -2
- sempy_labs/admin/_capacities.py +84 -82
- sempy_labs/admin/_dataflows.py +2 -2
- sempy_labs/admin/_datasets.py +50 -48
- sempy_labs/admin/_domains.py +25 -19
- sempy_labs/admin/_external_data_share.py +24 -22
- sempy_labs/admin/_git.py +17 -17
- sempy_labs/admin/_items.py +47 -45
- sempy_labs/admin/_reports.py +61 -58
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +18 -18
- sempy_labs/admin/_tags.py +2 -2
- sempy_labs/admin/_tenant.py +57 -51
- sempy_labs/admin/_users.py +16 -15
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/directlake/__init__.py +12 -12
- sempy_labs/directlake/_directlake_schema_compare.py +3 -3
- sempy_labs/directlake/_directlake_schema_sync.py +9 -7
- sempy_labs/directlake/_dl_helper.py +5 -2
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
- sempy_labs/directlake/_warm_cache.py +3 -3
- sempy_labs/graph/__init__.py +3 -3
- sempy_labs/graph/_groups.py +81 -78
- sempy_labs/graph/_teams.py +21 -21
- sempy_labs/graph/_users.py +109 -10
- sempy_labs/lakehouse/__init__.py +7 -7
- sempy_labs/lakehouse/_blobs.py +30 -30
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
- sempy_labs/lakehouse/_helper.py +38 -1
- sempy_labs/lakehouse/_lakehouse.py +16 -7
- sempy_labs/lakehouse/_livy_sessions.py +47 -42
- sempy_labs/lakehouse/_shortcuts.py +22 -21
- sempy_labs/migration/__init__.py +8 -8
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +35 -44
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +9 -20
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -9
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +11 -20
- sempy_labs/migration/_migration_validation.py +1 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/__init__.py +10 -10
- sempy_labs/report/_download_report.py +2 -2
- sempy_labs/report/_export_report.py +2 -2
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_paginated.py +1 -1
- sempy_labs/report/_report_bpa.py +4 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_list_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +248 -250
- sempy_labs/report/_save_report.py +3 -3
- sempy_labs/theme/_org_themes.py +19 -6
- sempy_labs/tom/__init__.py +1 -1
- sempy_labs/tom/_model.py +13 -8
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- semantic_link_labs-0.11.1.dist-info/RECORD +0 -210
- sempy_labs/_dax_query_view.py +0 -57
- sempy_labs/_ml_models.py +0 -110
- sempy_labs/_variable_libraries.py +0 -91
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/top_level.txt +0 -0
sempy_labs/migration/_migrate_calctables_to_semantic_model.py

@@ -1,9 +1,7 @@
 import sempy.fabric as fabric
 import re
-import datetime
 from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs._helper_functions import (
-    resolve_lakehouse_name,
     format_dax_object_name,
     retry,
 )
@@ -11,16 +9,17 @@ from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def migrate_calc_tables_to_semantic_model(
     dataset: str,
     new_dataset: str,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
-    lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    new_dataset_workspace: Optional[str | UUID] = None,
+    lakehouse: Optional[str | UUID] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Creates new tables in the Direct Lake semantic model based on the lakehouse tables created using the 'migrate_calc_tables_to_lakehouse' function.
@@ -31,18 +30,18 @@ def migrate_calc_tables_to_semantic_model(
         Name of the import/DirectQuery semantic model.
     new_dataset : str
         Name of the Direct Lake semantic model.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the import/DirectQuery semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    new_dataset_workspace : str
+    new_dataset_workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the Direct Lake semantic model will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    lakehouse : str, default=None
+    lakehouse : str | uuid.UUID, default=None
         The Fabric lakehouse used by the Direct Lake semantic model.
         Defaults to None which resolves to the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
+    lakehouse_workspace : str | uuid.UUID, default=None
         The Fabric workspace used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -53,18 +52,8 @@ def migrate_calc_tables_to_semantic_model(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
         )

-    workspace = fabric.resolve_workspace_name(workspace)
     fabric.refresh_tom_cache(workspace=workspace)

-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
-    if lakehouse_workspace is None:
-        lakehouse_workspace = new_dataset_workspace
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
-
     icons.sll_tags.append("DirectLakeMigration")

     # Get calc tables but not field parameters
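Net effect of the four hunks above: migrate_calc_tables_to_semantic_model now accepts workspace and lakehouse identifiers as either names or UUIDs, and the eager name resolution (fabric.resolve_workspace_name, resolve_lakehouse_name) is gone from the function body. A minimal sketch of the new call shape, assuming the usual re-export from sempy_labs.migration; all model, workspace, and lakehouse names below are placeholders:

```python
from uuid import UUID

from sempy_labs.migration import migrate_calc_tables_to_semantic_model

# Placeholder identifiers for illustration only.
source_workspace = UUID("00000000-0000-0000-0000-000000000000")  # UUIDs accepted as of 0.11.3

migrate_calc_tables_to_semantic_model(
    dataset="Sales Import Model",
    new_dataset="Sales Direct Lake Model",
    workspace=source_workspace,          # str or uuid.UUID
    new_dataset_workspace="Analytics",   # names still work
    lakehouse="SalesLakehouse",
    lakehouse_workspace=None,            # still resolved from the attached lakehouse
)
```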
sempy_labs/migration/_migrate_model_objects_to_semantic_model.py

@@ -10,14 +10,15 @@ from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def migrate_model_objects_to_semantic_model(
     dataset: str,
     new_dataset: str,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    new_dataset_workspace: Optional[str | UUID] = None,
 ):
     """
     Adds the rest of the model objects (besides tables/columns) and their properties to a Direct Lake semantic model based on an import/DirectQuery semantic model.
@@ -28,11 +29,11 @@ def migrate_model_objects_to_semantic_model(
         Name of the import/DirectQuery semantic model.
     new_dataset : str
         Name of the Direct Lake semantic model.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the import/DirectQuery semantic model exists.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
-    new_dataset_workspace : str
+    new_dataset_workspace : str | uuid.UUID, default=None
         The Fabric workspace name in which the Direct Lake semantic model will be created.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
@@ -47,12 +48,7 @@ def migrate_model_objects_to_semantic_model(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
         )

-    workspace = fabric.resolve_workspace_name(workspace)
     fabric.refresh_tom_cache(workspace=workspace)
-
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
     icons.sll_tags.append("DirectLakeMigration")

     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py

@@ -1,22 +1,23 @@
 import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs.directlake._generate_shared_expression import generate_shared_expression
-from sempy_labs._helper_functions import
+from sempy_labs._helper_functions import retry
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 from sempy_labs.tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log
 def migrate_tables_columns_to_semantic_model(
     dataset: str,
     new_dataset: str,
-    workspace: Optional[str] = None,
-    new_dataset_workspace: Optional[str] = None,
-    lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    new_dataset_workspace: Optional[str | UUID] = None,
+    lakehouse: Optional[str | UUID] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Adds tables/columns to the new Direct Lake semantic model based on an import/DirectQuery semantic model.
@@ -49,18 +50,6 @@ def migrate_tables_columns_to_semantic_model(
             f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters are both set to '{dataset}'. These parameters must be set to different values."
         )

-    workspace = fabric.resolve_workspace_name(workspace)
-
-    if new_dataset_workspace is None:
-        new_dataset_workspace = workspace
-
-    if lakehouse_workspace is None:
-        lakehouse_workspace = new_dataset_workspace
-
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
-
     icons.sll_tags.append("DirectLakeMigration")

     # Check that lakehouse is attached to the notebook
@@ -72,11 +61,13 @@ def migrate_tables_columns_to_semantic_model(
             "\nLearn more here: https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse"
         )
     shEx = generate_shared_expression(
-        item_name=lakehouse,
+        item_name=lakehouse,
+        item_type="Lakehouse",
+        workspace=lakehouse_workspace,
+        use_sql_endpoint=False,
     )

     fabric.refresh_tom_cache(workspace=workspace)
-
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfT = fabric.list_tables(dataset=dataset, workspace=workspace)
     dfT.rename(columns={"Type": "Table Type"}, inplace=True)
@@ -136,7 +127,7 @@ def migrate_tables_columns_to_semantic_model(
         tDC = r["Data Category"]
         tHid = bool(r["Hidden"])
         tDesc = r["Description"]
-        ent_name = tName.replace(" ", "_")
+        ent_name = tName  # .replace(" ", "_")
         for char in icons.special_characters:
             ent_name = ent_name.replace(char, "")

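The last hunk for this file changes how the Direct Lake entity name is derived from the table name: spaces are no longer replaced with underscores; only the characters in icons.special_characters are stripped. A before/after sketch, using a hypothetical stand-in for that character list:

```python
# Hypothetical stand-in for icons.special_characters.
special_characters = ["'", '"', "[", "]"]

t_name = "Sales 'Current Year'"

# 0.11.1: spaces -> underscores, then strip special characters.
old_ent_name = t_name.replace(" ", "_")
for char in special_characters:
    old_ent_name = old_ent_name.replace(char, "")
print(old_ent_name)  # Sales_Current_Year

# 0.11.3: spaces preserved, only special characters stripped.
new_ent_name = t_name
for char in special_characters:
    new_ent_name = new_ent_name.replace(char, "")
print(new_ent_name)  # Sales Current Year
```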
sempy_labs/migration/_migration_validation.py

@@ -1,7 +1,6 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
-from
+from .._list_functions import list_semantic_model_objects
 from sempy._utils._log import log
 import sempy_labs._icons as icons

sempy_labs/migration/_refresh_calc_tables.py

@@ -1,12 +1,12 @@
 import sempy.fabric as fabric
 import pandas as pd
 import re
-from
+from ..tom import connect_semantic_model
 from typing import Optional
 from sempy._utils._log import log
 import sempy_labs._icons as icons
 from uuid import UUID
-from
+from .._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     save_as_delta_table,
sempy_labs/mirrored_azure_databricks_catalog/__init__.py

@@ -1,7 +1,7 @@
-from
+from ._refresh_catalog_metadata import (
     refresh_catalog_metadata,
 )
-from
+from ._discover import (
     discover_catalogs,
     discover_schemas,
     discover_tables,
sempy_labs/mirrored_azure_databricks_catalog/_discover.py

@@ -1,6 +1,6 @@
 from uuid import UUID
 from typing import Optional
-from
+from .._helper_functions import (
     resolve_workspace_id,
     _base_api,
     _create_dataframe,
@@ -54,20 +54,20 @@ def discover_catalogs(

     df = _create_dataframe(columns=columns)

-
+    rows = []
     for r in responses:
         for i in r.get("value", []):
-
-
-
-
-
-
-
-
-
-            if
-            df = pd.
+            rows.append(
+                {
+                    "Catalog Name": i.get("name"),
+                    "Catalog Full Name": i.get("fullName"),
+                    "Catalog Type": i.get("catalogType"),
+                    "Storage Location": i.get("storageLocation"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

@@ -120,20 +120,20 @@ def discover_schemas(

     df = _create_dataframe(columns=columns)

-
+    rows = []
     for r in responses:
         for i in r.get("value", []):
-
-
-
-
-
-
-
-
-
-            if
-            df = pd.
+            rows.append(
+                {
+                    "Catalog Name": catalog,
+                    "Schema Name": i.get("name"),
+                    "Schema Full Name": i.get("fullName"),
+                    "Storage Location": i.get("storageLocation"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df

@@ -192,22 +192,22 @@ def discover_tables(

     df = _create_dataframe(columns=columns)

-
+    rows = []
     for r in responses:
         for i in r.get("value", []):
-
-
-
-
-
-
-
-
-
-
-
-
-            if
-            df = pd.
+            rows.append(
+                {
+                    "Catalog Name": catalog,
+                    "Schema Name": schema,
+                    "Table Name": i.get("name"),
+                    "Table Full Name": i.get("fullName"),
+                    "Storage Location": i.get("storageLocation"),
+                    "Table Type": i.get("tableType"),
+                    "Data Source Format": i.get("dataSourceFormat"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
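All three discover_* functions now accumulate plain dicts in a rows list and build the DataFrame once at the end; the removed lines are truncated in this view, but the old shape appears to have grown the frame inside the loop, which is more expensive than a single construction. The pattern in isolation, with a simulated response payload shaped like the fields read in the hunks above:

```python
import pandas as pd

# Simulated discovery responses (illustrative values only).
responses = [
    {"value": [
        {"name": "main", "fullName": "main",
         "catalogType": "MANAGED_CATALOG", "storageLocation": None},
    ]},
]

columns = {
    "Catalog Name": "string", "Catalog Full Name": "string",
    "Catalog Type": "string", "Storage Location": "string",
}

rows = []
for r in responses:
    for i in r.get("value", []):
        rows.append(
            {
                "Catalog Name": i.get("name"),
                "Catalog Full Name": i.get("fullName"),
                "Catalog Type": i.get("catalogType"),
                "Storage Location": i.get("storageLocation"),
            }
        )

# One DataFrame construction instead of one concat per row.
if rows:
    df = pd.DataFrame(rows, columns=list(columns.keys()))
else:
    df = pd.DataFrame(columns=list(columns.keys()))
```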
sempy_labs/ml_model/__init__.py (new file)

@@ -0,0 +1,23 @@
+from ._functions import (
+    list_ml_models,
+    create_ml_model,
+    delete_ml_model,
+    activate_ml_model_endpoint_version,
+    deactivate_all_ml_model_endpoint_versions,
+    deactivate_ml_model_endpoint_version,
+    list_ml_model_endpoint_versions,
+    score_ml_model_endpoint,
+    score_ml_model_endpoint_version,
+)
+
+__all__ = [
+    "list_ml_models",
+    "create_ml_model",
+    "delete_ml_model",
+    "activate_ml_model_endpoint_version",
+    "deactivate_all_ml_model_endpoint_versions",
+    "deactivate_ml_model_endpoint_version",
+    "list_ml_model_endpoint_versions",
+    "score_ml_model_endpoint",
+    "score_ml_model_endpoint_version",
+]