semantic-link-labs 0.11.1__py3-none-any.whl → 0.11.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/METADATA +7 -6
- semantic_link_labs-0.11.3.dist-info/RECORD +212 -0
- sempy_labs/__init__.py +65 -71
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_ai.py +1 -1
- sempy_labs/_capacities.py +2 -2
- sempy_labs/_capacity_migration.py +5 -5
- sempy_labs/_clear_cache.py +1 -1
- sempy_labs/_connections.py +2 -2
- sempy_labs/_dashboards.py +16 -16
- sempy_labs/_data_pipelines.py +1 -1
- sempy_labs/_dataflows.py +101 -26
- sempy_labs/_dax.py +3 -3
- sempy_labs/_delta_analyzer.py +4 -4
- sempy_labs/_delta_analyzer_history.py +1 -1
- sempy_labs/_deployment_pipelines.py +1 -1
- sempy_labs/_environments.py +22 -21
- sempy_labs/_eventhouses.py +12 -11
- sempy_labs/_eventstreams.py +12 -11
- sempy_labs/_external_data_shares.py +78 -23
- sempy_labs/_gateways.py +47 -45
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +1 -1
- sempy_labs/_graphQL.py +12 -11
- sempy_labs/_helper_functions.py +169 -5
- sempy_labs/_job_scheduler.py +56 -54
- sempy_labs/_kql_databases.py +16 -17
- sempy_labs/_kql_querysets.py +12 -11
- sempy_labs/_kusto.py +2 -2
- sempy_labs/_labels.py +126 -0
- sempy_labs/_list_functions.py +2 -2
- sempy_labs/_managed_private_endpoints.py +18 -15
- sempy_labs/_mirrored_databases.py +16 -15
- sempy_labs/_mirrored_warehouses.py +12 -11
- sempy_labs/_ml_experiments.py +11 -10
- sempy_labs/_model_auto_build.py +3 -3
- sempy_labs/_model_bpa.py +5 -5
- sempy_labs/_model_bpa_bulk.py +3 -3
- sempy_labs/_model_dependencies.py +1 -1
- sempy_labs/_mounted_data_factories.py +12 -12
- sempy_labs/_notebooks.py +151 -2
- sempy_labs/_one_lake_integration.py +1 -1
- sempy_labs/_query_scale_out.py +1 -1
- sempy_labs/_refresh_semantic_model.py +1 -1
- sempy_labs/_semantic_models.py +30 -28
- sempy_labs/_spark.py +1 -1
- sempy_labs/_sql.py +1 -1
- sempy_labs/_sql_endpoints.py +12 -11
- sempy_labs/_sqldatabase.py +15 -15
- sempy_labs/_tags.py +11 -10
- sempy_labs/_translations.py +1 -1
- sempy_labs/_user_delegation_key.py +2 -2
- sempy_labs/_vertipaq.py +3 -3
- sempy_labs/_vpax.py +1 -1
- sempy_labs/_warehouses.py +15 -14
- sempy_labs/_workloads.py +1 -1
- sempy_labs/_workspace_identity.py +1 -1
- sempy_labs/_workspaces.py +14 -13
- sempy_labs/admin/__init__.py +18 -18
- sempy_labs/admin/_activities.py +46 -46
- sempy_labs/admin/_apps.py +28 -26
- sempy_labs/admin/_artifacts.py +15 -15
- sempy_labs/admin/_basic_functions.py +1 -2
- sempy_labs/admin/_capacities.py +84 -82
- sempy_labs/admin/_dataflows.py +2 -2
- sempy_labs/admin/_datasets.py +50 -48
- sempy_labs/admin/_domains.py +25 -19
- sempy_labs/admin/_external_data_share.py +24 -22
- sempy_labs/admin/_git.py +17 -17
- sempy_labs/admin/_items.py +47 -45
- sempy_labs/admin/_reports.py +61 -58
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +18 -18
- sempy_labs/admin/_tags.py +2 -2
- sempy_labs/admin/_tenant.py +57 -51
- sempy_labs/admin/_users.py +16 -15
- sempy_labs/admin/_workspaces.py +2 -2
- sempy_labs/directlake/__init__.py +12 -12
- sempy_labs/directlake/_directlake_schema_compare.py +3 -3
- sempy_labs/directlake/_directlake_schema_sync.py +9 -7
- sempy_labs/directlake/_dl_helper.py +5 -2
- sempy_labs/directlake/_generate_shared_expression.py +1 -1
- sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
- sempy_labs/directlake/_guardrails.py +1 -1
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -3
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- sempy_labs/directlake/_update_directlake_partition_entity.py +4 -4
- sempy_labs/directlake/_warm_cache.py +3 -3
- sempy_labs/graph/__init__.py +3 -3
- sempy_labs/graph/_groups.py +81 -78
- sempy_labs/graph/_teams.py +21 -21
- sempy_labs/graph/_users.py +109 -10
- sempy_labs/lakehouse/__init__.py +7 -7
- sempy_labs/lakehouse/_blobs.py +30 -30
- sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +29 -27
- sempy_labs/lakehouse/_helper.py +38 -1
- sempy_labs/lakehouse/_lakehouse.py +16 -7
- sempy_labs/lakehouse/_livy_sessions.py +47 -42
- sempy_labs/lakehouse/_shortcuts.py +22 -21
- sempy_labs/migration/__init__.py +8 -8
- sempy_labs/migration/_create_pqt_file.py +2 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +35 -44
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +9 -20
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -9
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +11 -20
- sempy_labs/migration/_migration_validation.py +1 -2
- sempy_labs/migration/_refresh_calc_tables.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +2 -2
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +40 -40
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +1 -1
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/__init__.py +10 -10
- sempy_labs/report/_download_report.py +2 -2
- sempy_labs/report/_export_report.py +2 -2
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_paginated.py +1 -1
- sempy_labs/report/_report_bpa.py +4 -3
- sempy_labs/report/_report_functions.py +3 -3
- sempy_labs/report/_report_list_functions.py +3 -3
- sempy_labs/report/_report_rebind.py +1 -1
- sempy_labs/report/_reportwrapper.py +248 -250
- sempy_labs/report/_save_report.py +3 -3
- sempy_labs/theme/_org_themes.py +19 -6
- sempy_labs/tom/__init__.py +1 -1
- sempy_labs/tom/_model.py +13 -8
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- semantic_link_labs-0.11.1.dist-info/RECORD +0 -210
- sempy_labs/_dax_query_view.py +0 -57
- sempy_labs/_ml_models.py +0 -110
- sempy_labs/_variable_libraries.py +0 -91
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.11.1.dist-info → semantic_link_labs-0.11.3.dist-info}/top_level.txt +0 -0
sempy_labs/_a_lib_info.py
CHANGED
@@ -1,2 +1,2 @@
 lib_name = "semanticlinklabs"
-lib_version = "0.11.1"
+lib_version = "0.11.3"
sempy_labs/_ai.py
CHANGED
@@ -4,7 +4,7 @@ import pandas as pd
 from typing import List, Optional, Union
 from IPython.display import display
 import sempy_labs._icons as icons
-from sempy_labs._helper_functions import (
+from .._helper_functions import (
     _read_delta_table,
     _run_spark_sql_query,
 )
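The recurring change across these modules is the switch from absolute `sempy_labs.*` imports to relative imports. A minimal sketch of the two forms as they would appear inside a module of the package (not runnable as a standalone script, since relative imports require a package context):

```python
# Inside a module of the sempy_labs package, both forms bind the same helper.

# 0.11.1 style: absolute import, tied to the installed top-level package name.
from sempy_labs._helper_functions import _read_delta_table

# 0.11.3 style: relative import, resolved against the containing package,
# so it keeps working if the package is vendored or renamed.
from ._helper_functions import _read_delta_table
```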
sempy_labs/_capacities.py
CHANGED
@@ -4,12 +4,12 @@ import sempy_labs._icons as icons
 from sempy.fabric.exceptions import FabricHTTPException
 import requests
 import pandas as pd
-from sempy_labs._authentication import (
+from ._authentication import (
     _get_headers,
     ServicePrincipalTokenProvider,
 )
 from uuid import UUID
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _is_valid_uuid,
     _update_dataframe_datatypes,
     _base_api,
sempy_labs/_capacity_migration.py
CHANGED
@@ -2,20 +2,20 @@ import sempy.fabric as fabric
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
-from sempy_labs._workspaces import assign_workspace_to_capacity
-from sempy_labs.admin import (
+from ._workspaces import assign_workspace_to_capacity
+from .admin import (
     assign_workspaces_to_capacity,
 )
-from sempy_labs.admin._capacities import (
+from .admin._capacities import (
     _list_capacities_meta,
     list_capacities,
 )
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_capacity_id,
     convert_to_alphanumeric_lowercase,
     _base_api,
 )
-from sempy_labs._capacities import create_fabric_capacity
+from ._capacities import create_fabric_capacity
 from uuid import UUID

sempy_labs/_clear_cache.py
CHANGED
sempy_labs/_connections.py
CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _is_valid_uuid,
     resolve_workspace_name_and_id,
     _update_dataframe_datatypes,
@@ -10,7 +10,7 @@ from sempy_labs._helper_functions import (
 )
 from uuid import UUID
 import sempy_labs._icons as icons
-from sempy_labs._gateways import _resolve_gateway_id
+from ._gateways import _resolve_gateway_id
 from sempy._utils._log import log

sempy_labs/_dashboards.py
CHANGED
@@ -1,7 +1,7 @@
 from typing import Optional
 from uuid import UUID
 import pandas as pd
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _create_dataframe,
     _base_api,
     resolve_workspace_id,
@@ -44,22 +44,22 @@ def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:

     response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/dashboards")

-    …
+    rows = []
     for v in response.json().get("value", []):
-        …
-    if …
-        df = pd.…
+        rows.append(
+            {
+                "Dashboard ID": v.get("id"),
+                "Dashboard Name": v.get("displayName"),
+                "Read Only": v.get("isReadOnly"),
+                "Web URL": v.get("webUrl"),
+                "Embed URL": v.get("embedUrl"),
+                "Data Classification": v.get("dataClassification"),
+                "Users": v.get("users"),
+                "Subscriptions": v.get("subscriptions"),
+            }
+        )
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
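The `list_*` functions in this release share a second refactor: result rows are accumulated in a plain list and converted to a DataFrame once, rather than grown row by row. A self-contained sketch of the pattern; the `columns` map and sample payload are illustrative stand-ins, not taken from the package:

```python
import pandas as pd

# Illustrative column map and payload; the real functions build these from
# _create_dataframe's column definitions and the REST response.
columns = {"Dashboard ID": "string", "Dashboard Name": "string"}
value = [
    {"id": "a1", "displayName": "Sales"},
    {"id": "b2", "displayName": "Finance"},
]

df = pd.DataFrame(columns=list(columns.keys()))

# Accumulate plain dicts; appending to a list is cheap per row, whereas
# pd.concat inside the loop copies the whole frame on every iteration.
rows = []
for v in value:
    rows.append(
        {
            "Dashboard ID": v.get("id"),
            "Dashboard Name": v.get("displayName"),
        }
    )

# Build the DataFrame once, preserving the declared column order.
if rows:
    df = pd.DataFrame(rows, columns=list(columns.keys()))
print(df)
```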
sempy_labs/_data_pipelines.py
CHANGED
sempy_labs/_dataflows.py
CHANGED
@@ -1,5 +1,5 @@
 import pandas as pd
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     _is_valid_uuid,
     _update_dataframe_datatypes,
@@ -51,18 +51,19 @@ def list_dataflows(workspace: Optional[str | UUID] = None):
         request=f"/v1.0/myorg/groups/{workspace_id}/dataflows", client="fabric_sp"
     )

-    …
+    rows = []
     for v in response.json().get("value", []):
         gen = v.get("generation")
-        …
+        rows.append(
+            {
+                "Dataflow Id": v.get("objectId"),
+                "Dataflow Name": v.get("name"),
+                "Description": "",
+                "Configured By": v.get("configuredBy"),
+                "Users": ", ".join(v.get("users", [])),
+                "Generation": "Gen2" if gen == 2 else "Gen1",
+            }
+        )

     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/dataflows",
@@ -72,18 +73,19 @@ def list_dataflows(workspace: Optional[str | UUID] = None):
     for r in responses:
         for v in r.get("value", []):
             gen = v.get("generation")
-            …
+            rows.append(
+                {
+                    "Dataflow Id": v.get("id"),
+                    "Dataflow Name": v.get("displayName"),
+                    "Description": v.get("description"),
+                    "Configured By": "",
+                    "Users": "",
+                    "Generation": "Gen2 CI/CD",
+                }
+            )

-    if …
-        df = pd.…
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
     _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df
@@ -414,6 +416,32 @@ def upgrade_dataflow(
     )
     query_groups_value = json.loads(matches[0].value) if matches else []

+    queries_metadata = get_jsonpath_value(
+        data=definition, path="$['pbi:mashup'].queriesMetadata"
+    )
+
+    default_staging = True if "DefaultStaging" in queries_metadata else False
+
+    # Collect keys to delete
+    keys_to_delete = [
+        key
+        for key in queries_metadata
+        if key.endswith("_DataDestination")
+        or key.endswith("_WriteToDataDestination")
+        or key.endswith("_TransformForWriteToDataDestination")
+        or key == "FastCopyStaging"
+    ]
+
+    # Delete them
+    for key in keys_to_delete:
+        del queries_metadata[key]
+
+    # Set load enabled and isHidden
+    for key, items in queries_metadata.items():
+        items["loadEnabled"] = False
+        if key in ["DefaultDestination", "DefaultStaging"]:
+            items["isHidden"] = True
+
     # Prepare the dataflow definition
     query_metadata = {
         "formatVersion": "202502",
@@ -421,9 +449,7 @@ def upgrade_dataflow(
         "name": new_dataflow_name,
         "queryGroups": query_groups_value,
         "documentLocale": get_jsonpath_value(data=definition, path="$.culture"),
-        "queriesMetadata": get_jsonpath_value(
-            data=definition, path="$['pbi:mashup'].queriesMetadata"
-        ),
+        "queriesMetadata": queries_metadata,
         "fastCombine": get_jsonpath_value(
             data=definition, path="$['pbi:mashup'].fastCombine", default=False
         ),
@@ -433,8 +459,57 @@ def upgrade_dataflow(
         # "connections": [],
     }

+    fast_copy = get_jsonpath_value(
+        data=definition, path="$['ppdf:fastCopy']", default=False
+    )
+    max_concurrency = get_jsonpath_value(
+        data=definition, path="$['ppdf:maxConcurrency']"
+    )
+    if fast_copy:
+        query_metadata["computeEngineSettings"] = {}
+
+        if max_concurrency:
+            query_metadata["computeEngineSettings"]["maxConcurrency"] = max_concurrency
+
     mashup_doc = get_jsonpath_value(data=definition, path="$['pbi:mashup'].document")

+    # Remove the FastCopyStaging section if it exists
+    new_mashup_doc = ""
+    if default_staging and fast_copy:
+        new_mashup_doc = '[DefaultOutputDestinationSettings = [DestinationDefinition = [Kind = "Reference", QueryName = "DefaultDestination", IsNewTarget = true], UpdateMethod = [Kind = "Replace"]], StagingDefinition = [Kind = "FastCopy"]]\r\nsection Section1'
+    elif default_staging and not fast_copy:
+        new_mashup_doc = '[DefaultOutputDestinationSettings = [DestinationDefinition = [Kind = "Reference", QueryName = "DefaultDestination", IsNewTarget = true], UpdateMethod = [Kind = "Replace"]]\r\nsection Section1'
+    elif not default_staging and fast_copy:
+        new_mashup_doc = '[StagingDefinition = [Kind = "FastCopy"]]\r\nsection Section1'
+    else:
+        new_mashup_doc = "section Section1"
+    for i in mashup_doc.split(";\r\nshared "):
+        # if 'IsParameterQuery=true' in i:
+        # Add to queries_metadata
+        if not (
+            "FastCopyStaging = let" in i
+            or '_WriteToDataDestination" = let' in i
+            or "_WriteToDataDestination = let" in i
+            or '_DataDestination" = let' in i
+            or "_DataDestination = let" in i
+            or '_TransformForWriteToDataDestination" = let' in i
+            or "_TransformForWriteToDataDestination = let" in i
+        ):
+            if i != "section Section1":
+                if default_staging and (
+                    "IsParameterQuery=true" not in i
+                    and not i.startswith("DefaultStaging")
+                    and not i.startswith("DefaultDestination")
+                ):
+                    new_mashup_doc += (
+                        ";\r\n[BindToDefaultDestination = true]\r\nshared " + i
+                    )
+                else:
+                    new_mashup_doc += ";\r\nshared " + i
+    new_mashup_doc = f"{new_mashup_doc};"
+
+    return new_mashup_doc, query_metadata
+
     # Add the dataflow definition to the payload
     new_definition = {
         "parts": [
@@ -445,7 +520,7 @@ def upgrade_dataflow(
             },
             {
                 "path": "mashup.pq",
-                "payload": _conv_b64(…
+                "payload": _conv_b64(new_mashup_doc, json_dumps=False),
                 "payloadType": "InlineBase64",
             },
         ]
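The new `upgrade_dataflow` code above prunes destination and staging entries from the mashup's `queriesMetadata` before rebuilding the definition. The same pruning step in isolation, applied to a hypothetical metadata dict (key names follow the suffixes matched in the diff):

```python
# Hypothetical queriesMetadata dict, shaped like the
# $['pbi:mashup'].queriesMetadata section of a Gen1 dataflow definition.
queries_metadata = {
    "Orders": {"queryId": "1"},
    "Orders_DataDestination": {"queryId": "2"},
    "FastCopyStaging": {"queryId": "3"},
    "DefaultStaging": {"queryId": "4"},
}

# Destination/staging helper queries are collected first, so the dict is
# not mutated while iterating, then removed.
keys_to_delete = [
    key
    for key in queries_metadata
    if key.endswith("_DataDestination")
    or key.endswith("_WriteToDataDestination")
    or key.endswith("_TransformForWriteToDataDestination")
    or key == "FastCopyStaging"
]
for key in keys_to_delete:
    del queries_metadata[key]

# Remaining queries are marked as not loaded; the default staging and
# destination entries are additionally hidden.
for key, items in queries_metadata.items():
    items["loadEnabled"] = False
    if key in ["DefaultDestination", "DefaultStaging"]:
        items["isHidden"] = True

print(queries_metadata)
# {'Orders': {'queryId': '1', 'loadEnabled': False},
#  'DefaultStaging': {'queryId': '4', 'loadEnabled': False, 'isHidden': True}}
```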
sempy_labs/_dax.py
CHANGED
@@ -1,17 +1,17 @@
 import sempy.fabric as fabric
 import pandas as pd
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     format_dax_object_name,
     resolve_dataset_name_and_id,
     _base_api,
     generate_guid,
 )
-from sempy_labs._model_dependencies import get_model_calc_dependencies
+from ._model_dependencies import get_model_calc_dependencies
 from typing import Optional, List, Tuple
 from sempy._utils._log import log
 from uuid import UUID
-from sempy_labs.directlake._warm_cache import _put_columns_into_memory
+from .directlake._warm_cache import _put_columns_into_memory
 import sempy_labs._icons as icons
 import time
sempy_labs/_delta_analyzer.py
CHANGED
@@ -5,7 +5,7 @@ import os
 from uuid import UUID
 from typing import Dict, Optional
 import pyarrow.parquet as pq
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     create_abfss_path,
     save_as_delta_table,
     _get_column_aggregate,
@@ -21,11 +21,11 @@ from sempy_labs._helper_functions import (
     _get_delta_table,
 )
 from sempy._utils._log import log
-from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
-from sempy_labs.lakehouse._lakehouse import (
+from .lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from .lakehouse._lakehouse import (
     lakehouse_attached,
 )
-from sempy_labs.lakehouse._helper import (
+from .lakehouse._helper import (
     is_v_ordered,
 )
 import sempy_labs._icons as icons
sempy_labs/_environments.py
CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_workspace_id,
     _base_api,
@@ -88,29 +88,30 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
             pub = v.get("properties", {}).get("publishDetails", {})
-            …
+            rows.append(
+                {
+                    "Environment Name": v.get("displayName"),
+                    "Environment Id": v.get("id"),
+                    "Description": v.get("description"),
+                    "Publish State": pub.get("state"),
+                    "Publish Target Version": pub.get("targetVersion"),
+                    "Publish Start Time": pub.get("startTime"),
+                    "Publish End Time": pub.get("endTime"),
+                    "Spark Libraries State": pub.get("componentPublishInfo", {})
+                    .get("sparkLibraries", {})
+                    .get("state"),
+                    "Spark Settings State": pub.get("componentPublishInfo", {})
+                    .get("sparkSettings", {})
+                    .get("state"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
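`list_environments` reads publish details through chained `.get(..., {})` calls, so a missing intermediate key falls through to `None` instead of raising. The same defensive pattern in isolation, with a made-up payload:

```python
# Made-up publishDetails payload; componentPublishInfo is deliberately absent.
pub = {"state": "Published", "targetVersion": "2024.1"}

# Each .get(..., {}) substitutes an empty dict when a key is missing, so the
# trailing .get("state") returns None instead of raising AttributeError.
spark_libraries_state = (
    pub.get("componentPublishInfo", {}).get("sparkLibraries", {}).get("state")
)
print(spark_libraries_state)  # None
```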
sempy_labs/_eventhouses.py
CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _base_api,
     _create_dataframe,
     _conv_b64,
@@ -103,18 +103,19 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         client="fabric_sp",
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
+            rows.append(
+                {
+                    "Eventhouse Name": v.get("displayName"),
+                    "Eventhouse Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df
sempy_labs/_eventstreams.py
CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from sempy_labs._helper_functions import (
+from ._helper_functions import (
     _base_api,
     delete_item,
     _create_dataframe,
@@ -44,18 +44,19 @@ def list_eventstreams(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         request=f"/v1/workspaces/{workspace_id}/eventstreams", uses_pagination=True
     )

-    …
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            …
+            rows.append(
+                {
+                    "Eventstream Name": v.get("displayName"),
+                    "Eventstream Id": v.get("id"),
+                    "Description": v.get("description"),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))

     return df