semantic-link-labs 0.10.0__tar.gz → 0.10.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic. Click here for more details.
- {semantic_link_labs-0.10.0/src/semantic_link_labs.egg-info → semantic_link_labs-0.10.1}/PKG-INFO +5 -3
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/README.md +3 -1
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/pyproject.toml +2 -2
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1/src/semantic_link_labs.egg-info}/PKG-INFO +5 -3
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/semantic_link_labs.egg-info/SOURCES.txt +6 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/semantic_link_labs.egg-info/requires.txt +1 -1
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/__init__.py +7 -1
- semantic_link_labs-0.10.1/src/sempy_labs/_a_lib_info.py +2 -0
- semantic_link_labs-0.10.1/src/sempy_labs/_daxformatter.py +78 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_list_functions.py +0 -43
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_notebooks.py +3 -3
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_semantic_models.py +101 -0
- semantic_link_labs-0.10.1/src/sempy_labs/_sql_endpoints.py +185 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -3
- semantic_link_labs-0.10.1/src/sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- semantic_link_labs-0.10.1/src/sempy_labs/mirrored_azure_databricks_catalog/_discover.py +209 -0
- semantic_link_labs-0.10.1/src/sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +43 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_reportwrapper.py +22 -17
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/tom/_model.py +193 -1
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/LICENSE +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/setup.cfg +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/semantic_link_labs.egg-info/dependency_links.txt +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/semantic_link_labs.egg-info/top_level.txt +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_ai.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_authentication.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_am-ET.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_da-DK.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_de-DE.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_el-GR.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_es-ES.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_he-IL.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_id-ID.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_is-IS.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_it-IT.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_te-IN.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_th-TH.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_capacities.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_capacity_migration.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_clear_cache.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_connections.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_dashboards.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_data_pipelines.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_dataflows.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_dax.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_dax_query_view.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_delta_analyzer.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_delta_analyzer_history.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_deployment_pipelines.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_dictionary_diffs.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_documentation.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_environments.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_eventhouses.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_eventstreams.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_external_data_shares.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_gateways.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_generate_semantic_model.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_git.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_graphQL.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_helper_functions.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_icons.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_job_scheduler.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_kql_databases.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_kql_querysets.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_kusto.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_managed_private_endpoints.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_mirrored_databases.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_mirrored_warehouses.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_ml_experiments.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_ml_models.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_model_auto_build.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_model_bpa.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_model_bpa_bulk.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_model_bpa_rules.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_model_dependencies.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_mounted_data_factories.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_one_lake_integration.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_query_scale_out.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_refresh_semantic_model.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_spark.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_sql.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_sqldatabase.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_tags.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_translations.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_user_delegation_key.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_utils.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_variable_libraries.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_vertipaq.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_vpax.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_warehouses.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_workloads.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_workspace_identity.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/_workspaces.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/__init__.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_activities.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_apps.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_artifacts.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_basic_functions.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_capacities.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_dataflows.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_datasets.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_domains.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_external_data_share.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_git.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_items.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_reports.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_scanner.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_shared.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_tags.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_tenant.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_users.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/admin/_workspaces.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/__init__.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_directlake_schema_compare.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_directlake_schema_sync.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_dl_helper.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_generate_shared_expression.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_get_directlake_lakehouse.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_get_shared_expression.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_guardrails.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_update_directlake_partition_entity.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/directlake/_warm_cache.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/dotnet_lib/dotnet.runtime.config.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/graph/__init__.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/graph/_groups.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/graph/_teams.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/graph/_users.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/lakehouse/__init__.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/lakehouse/_blobs.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/lakehouse/_get_lakehouse_columns.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/lakehouse/_get_lakehouse_tables.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/lakehouse/_helper.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/lakehouse/_lakehouse.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/lakehouse/_livy_sessions.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/lakehouse/_shortcuts.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/__init__.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/_create_pqt_file.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/_direct_lake_to_import.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/_migration_validation.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/migration/_refresh_calc_tables.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_BPAReportTemplate.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/__init__.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/report.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition/version.json +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_bpareporttemplate/definition.pbir +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_download_report.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_export_report.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_generate_report.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_paginated.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_report_bpa.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_report_bpa_rules.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_report_functions.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_report_helper.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_report_list_functions.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_report_rebind.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/report/_save_report.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/sempy_labs/tom/__init__.py +0 -0
- {semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/tests/test_friendly_case.py +0 -0
{semantic_link_labs-0.10.0/src/semantic_link_labs.egg-info → semantic_link_labs-0.10.1}/PKG-INFO
RENAMED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: semantic-link-labs
|
|
3
|
-
Version: 0.10.0
|
|
3
|
+
Version: 0.10.1
|
|
4
4
|
Summary: Semantic Link Labs for Microsoft Fabric
|
|
5
5
|
Author: Microsoft Corporation
|
|
6
6
|
License: MIT License
|
|
@@ -15,7 +15,7 @@ Classifier: Framework :: Jupyter
|
|
|
15
15
|
Requires-Python: <3.12,>=3.10
|
|
16
16
|
Description-Content-Type: text/markdown
|
|
17
17
|
License-File: LICENSE
|
|
18
|
-
Requires-Dist: semantic-link-sempy>=0.
|
|
18
|
+
Requires-Dist: semantic-link-sempy>=0.11.0
|
|
19
19
|
Requires-Dist: anytree
|
|
20
20
|
Requires-Dist: powerbiclient
|
|
21
21
|
Requires-Dist: polib
|
|
@@ -27,7 +27,7 @@ Dynamic: license-file
|
|
|
27
27
|
# Semantic Link Labs
|
|
28
28
|
|
|
29
29
|
[](https://badge.fury.io/py/semantic-link-labs)
|
|
30
|
-
[](https://readthedocs.org/projects/semantic-link-labs/)
|
|
31
31
|
[](https://github.com/psf/black)
|
|
32
32
|
[](https://pepy.tech/project/semantic-link-labs)
|
|
33
33
|
|
|
@@ -56,6 +56,7 @@ Check out the video below for an introduction to Semantic Link, Semantic Link La
|
|
|
56
56
|
* [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
|
|
57
57
|
* [Model Best Practice Analyzer (BPA)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#model-best-practice-analyzer)
|
|
58
58
|
* [Vertipaq Analyzer](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#vertipaq-analyzer)
|
|
59
|
+
* [Create a .vpax file](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#create-a-vpax-file)
|
|
59
60
|
* [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
|
|
60
61
|
* [Translate a semantic model's metadata](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#translate-a-semantic-model)
|
|
61
62
|
* [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
|
|
@@ -154,6 +155,7 @@ An even better way to ensure the semantic-link-labs library is available in your
|
|
|
154
155
|
2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
|
|
155
156
|
|
|
156
157
|
## Version History
|
|
158
|
+
* [0.10.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.1) (June 10, 2025)
|
|
157
159
|
* [0.10.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.0) (May 30, 2025)
|
|
158
160
|
* [0.9.11](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.11) (May 22, 2025)
|
|
159
161
|
* [0.9.10](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.10) (April 24, 2025)
|
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
# Semantic Link Labs
|
|
2
2
|
|
|
3
3
|
[](https://badge.fury.io/py/semantic-link-labs)
|
|
4
|
-
[](https://readthedocs.org/projects/semantic-link-labs/)
|
|
5
5
|
[](https://github.com/psf/black)
|
|
6
6
|
[](https://pepy.tech/project/semantic-link-labs)
|
|
7
7
|
|
|
@@ -30,6 +30,7 @@ Check out the video below for an introduction to Semantic Link, Semantic Link La
|
|
|
30
30
|
* [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
|
|
31
31
|
* [Model Best Practice Analyzer (BPA)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#model-best-practice-analyzer)
|
|
32
32
|
* [Vertipaq Analyzer](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#vertipaq-analyzer)
|
|
33
|
+
* [Create a .vpax file](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#create-a-vpax-file)
|
|
33
34
|
* [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
|
|
34
35
|
* [Translate a semantic model's metadata](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#translate-a-semantic-model)
|
|
35
36
|
* [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
|
|
@@ -128,6 +129,7 @@ An even better way to ensure the semantic-link-labs library is available in your
|
|
|
128
129
|
2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
|
|
129
130
|
|
|
130
131
|
## Version History
|
|
132
|
+
* [0.10.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.1) (June 10, 2025)
|
|
131
133
|
* [0.10.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.0) (May 30, 2025)
|
|
132
134
|
* [0.9.11](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.11) (May 22, 2025)
|
|
133
135
|
* [0.9.10](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.10) (April 24, 2025)
|
|
@@ -7,7 +7,7 @@ name="semantic-link-labs"
|
|
|
7
7
|
authors = [
|
|
8
8
|
{ name = "Microsoft Corporation" },
|
|
9
9
|
]
|
|
10
|
-
version="0.10.0"
|
|
10
|
+
version="0.10.1"
|
|
11
11
|
description="Semantic Link Labs for Microsoft Fabric"
|
|
12
12
|
readme="README.md"
|
|
13
13
|
requires-python=">=3.10,<3.12"
|
|
@@ -23,7 +23,7 @@ classifiers = [
|
|
|
23
23
|
license= { text = "MIT License" }
|
|
24
24
|
|
|
25
25
|
dependencies = [
|
|
26
|
-
"semantic-link-sempy>=0.
|
|
26
|
+
"semantic-link-sempy>=0.11.0",
|
|
27
27
|
"anytree",
|
|
28
28
|
"powerbiclient",
|
|
29
29
|
"polib",
|
{semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1/src/semantic_link_labs.egg-info}/PKG-INFO
RENAMED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: semantic-link-labs
|
|
3
|
-
Version: 0.10.0
|
|
3
|
+
Version: 0.10.1
|
|
4
4
|
Summary: Semantic Link Labs for Microsoft Fabric
|
|
5
5
|
Author: Microsoft Corporation
|
|
6
6
|
License: MIT License
|
|
@@ -15,7 +15,7 @@ Classifier: Framework :: Jupyter
|
|
|
15
15
|
Requires-Python: <3.12,>=3.10
|
|
16
16
|
Description-Content-Type: text/markdown
|
|
17
17
|
License-File: LICENSE
|
|
18
|
-
Requires-Dist: semantic-link-sempy>=0.
|
|
18
|
+
Requires-Dist: semantic-link-sempy>=0.11.0
|
|
19
19
|
Requires-Dist: anytree
|
|
20
20
|
Requires-Dist: powerbiclient
|
|
21
21
|
Requires-Dist: polib
|
|
@@ -27,7 +27,7 @@ Dynamic: license-file
|
|
|
27
27
|
# Semantic Link Labs
|
|
28
28
|
|
|
29
29
|
[](https://badge.fury.io/py/semantic-link-labs)
|
|
30
|
-
[](https://readthedocs.org/projects/semantic-link-labs/)
|
|
31
31
|
[](https://github.com/psf/black)
|
|
32
32
|
[](https://pepy.tech/project/semantic-link-labs)
|
|
33
33
|
|
|
@@ -56,6 +56,7 @@ Check out the video below for an introduction to Semantic Link, Semantic Link La
|
|
|
56
56
|
* [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
|
|
57
57
|
* [Model Best Practice Analyzer (BPA)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#model-best-practice-analyzer)
|
|
58
58
|
* [Vertipaq Analyzer](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#vertipaq-analyzer)
|
|
59
|
+
* [Create a .vpax file](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#create-a-vpax-file)
|
|
59
60
|
* [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
|
|
60
61
|
* [Translate a semantic model's metadata](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#translate-a-semantic-model)
|
|
61
62
|
* [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
|
|
@@ -154,6 +155,7 @@ An even better way to ensure the semantic-link-labs library is available in your
|
|
|
154
155
|
2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
|
|
155
156
|
|
|
156
157
|
## Version History
|
|
158
|
+
* [0.10.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.1) (June 10, 2025)
|
|
157
159
|
* [0.10.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.0) (May 30, 2025)
|
|
158
160
|
* [0.9.11](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.11) (May 22, 2025)
|
|
159
161
|
* [0.9.10](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.10) (April 24, 2025)
|
{semantic_link_labs-0.10.0 → semantic_link_labs-0.10.1}/src/semantic_link_labs.egg-info/SOURCES.txt
RENAMED
|
@@ -7,6 +7,7 @@ src/semantic_link_labs.egg-info/dependency_links.txt
|
|
|
7
7
|
src/semantic_link_labs.egg-info/requires.txt
|
|
8
8
|
src/semantic_link_labs.egg-info/top_level.txt
|
|
9
9
|
src/sempy_labs/__init__.py
|
|
10
|
+
src/sempy_labs/_a_lib_info.py
|
|
10
11
|
src/sempy_labs/_ai.py
|
|
11
12
|
src/sempy_labs/_authentication.py
|
|
12
13
|
src/sempy_labs/_capacities.py
|
|
@@ -18,6 +19,7 @@ src/sempy_labs/_data_pipelines.py
|
|
|
18
19
|
src/sempy_labs/_dataflows.py
|
|
19
20
|
src/sempy_labs/_dax.py
|
|
20
21
|
src/sempy_labs/_dax_query_view.py
|
|
22
|
+
src/sempy_labs/_daxformatter.py
|
|
21
23
|
src/sempy_labs/_delta_analyzer.py
|
|
22
24
|
src/sempy_labs/_delta_analyzer_history.py
|
|
23
25
|
src/sempy_labs/_deployment_pipelines.py
|
|
@@ -56,6 +58,7 @@ src/sempy_labs/_refresh_semantic_model.py
|
|
|
56
58
|
src/sempy_labs/_semantic_models.py
|
|
57
59
|
src/sempy_labs/_spark.py
|
|
58
60
|
src/sempy_labs/_sql.py
|
|
61
|
+
src/sempy_labs/_sql_endpoints.py
|
|
59
62
|
src/sempy_labs/_sqldatabase.py
|
|
60
63
|
src/sempy_labs/_tags.py
|
|
61
64
|
src/sempy_labs/_translations.py
|
|
@@ -160,6 +163,9 @@ src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py
|
|
|
160
163
|
src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py
|
|
161
164
|
src/sempy_labs/migration/_migration_validation.py
|
|
162
165
|
src/sempy_labs/migration/_refresh_calc_tables.py
|
|
166
|
+
src/sempy_labs/mirrored_azure_databricks_catalog/__init__.py
|
|
167
|
+
src/sempy_labs/mirrored_azure_databricks_catalog/_discover.py
|
|
168
|
+
src/sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py
|
|
163
169
|
src/sempy_labs/report/_BPAReportTemplate.json
|
|
164
170
|
src/sempy_labs/report/__init__.py
|
|
165
171
|
src/sempy_labs/report/_download_report.py
|
|
@@ -1,3 +1,7 @@
|
|
|
1
|
+
from sempy_labs._sql_endpoints import (
|
|
2
|
+
list_sql_endpoints,
|
|
3
|
+
refresh_sql_endpoint_metadata,
|
|
4
|
+
)
|
|
1
5
|
from sempy_labs._variable_libraries import (
|
|
2
6
|
list_variable_libraries,
|
|
3
7
|
delete_variable_library,
|
|
@@ -30,6 +34,7 @@ from sempy_labs._semantic_models import (
|
|
|
30
34
|
enable_semantic_model_scheduled_refresh,
|
|
31
35
|
delete_semantic_model,
|
|
32
36
|
update_semantic_model_refresh_schedule,
|
|
37
|
+
list_semantic_model_datasources,
|
|
33
38
|
)
|
|
34
39
|
from sempy_labs._graphQL import (
|
|
35
40
|
list_graphql_apis,
|
|
@@ -275,7 +280,6 @@ from sempy_labs._list_functions import (
|
|
|
275
280
|
get_object_level_security,
|
|
276
281
|
list_datamarts,
|
|
277
282
|
list_lakehouses,
|
|
278
|
-
list_sql_endpoints,
|
|
279
283
|
update_item,
|
|
280
284
|
list_server_properties,
|
|
281
285
|
list_semantic_model_errors,
|
|
@@ -589,4 +593,6 @@ __all__ = [
|
|
|
589
593
|
"apply_tags",
|
|
590
594
|
"unapply_tags",
|
|
591
595
|
"get_user_delegation_key",
|
|
596
|
+
"refresh_sql_endpoint_metadata",
|
|
597
|
+
"list_semantic_model_datasources",
|
|
592
598
|
]
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
from typing import List, Optional
|
|
3
|
+
from sempy_labs._a_lib_info import lib_name, lib_version
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def _format_dax(
    expressions: str | List[str],
    skip_space_after_function_name: bool = False,
    metadata: Optional[List[dict]] = None,
) -> List[str]:
    """
    Format one or more DAX expressions via the daxformatter.com web service.

    Parameters
    ----------
    expressions : str | List[str]
        A single DAX expression or a list of DAX expressions to format.
    skip_space_after_function_name : bool, default=False
        If True, the formatter omits the space after function names.
    metadata : List[dict], default=None
        Optional per-expression metadata used only to build error messages.
        Recognized keys: 'name', 'table', 'type' (e.g. 'measures', 'rls').

    Returns
    -------
    List[str]
        The formatted DAX expressions, in the same order as the input.

    Raises
    ------
    ValueError
        If the formatter reports errors for a recognized object type.
    NotImplementedError
        If the formatter reports errors for an unrecognized object type.
    """
    if isinstance(expressions, str):
        expressions = [expressions]
        metadata = [metadata] if metadata else [{}]

    # Prefix each expression with a variable assignment so the service accepts
    # bare expressions; the prefix is stripped from the formatted result below.
    expressions = [f"x :={item}" for item in expressions]

    url = "https://daxformatter.azurewebsites.net/api/daxformatter/daxtextformatmulti"

    payload = {
        "Dax": expressions,
        "MaxLineLength": 0,
        "SkipSpaceAfterFunctionName": skip_space_after_function_name,
        "ListSeparator": ",",
        "DecimalSeparator": ".",
    }

    headers = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Accept-Encoding": "gzip,deflate",
        "Accept-Language": "en-US,en;q=0.8",
        "Content-Type": "application/json; charset=UTF-8",
        "Host": "daxformatter.azurewebsites.net",
        "Expect": "100-continue",
        "Connection": "Keep-Alive",
        "CallerApp": lib_name,
        "CallerVersion": lib_version,
    }

    # Bounded timeout so a hung service call cannot block the notebook forever;
    # raise_for_status surfaces HTTP failures before attempting to parse JSON.
    response = requests.post(url, json=payload, headers=headers, timeout=60)
    response.raise_for_status()

    result = []
    for idx, dax in enumerate(response.json()):
        formatted_dax = dax.get("formatted")
        errors = dax.get("errors")
        if errors:
            meta = metadata[idx] if metadata and idx < len(metadata) else {}
            obj_name = meta.get("name", "Unknown")
            table_name = meta.get("table", "Unknown")
            obj_type = meta.get("type", "Unknown")
            if obj_type == "calculated_tables":
                raise ValueError(
                    f"DAX formatting failed for the '{obj_name}' calculated table: {errors}"
                )
            elif obj_type == "calculated_columns":
                raise ValueError(
                    f"DAX formatting failed for the '{table_name}'[{obj_name}] calculated column: {errors}"
                )
            elif obj_type == "calculation_items":
                raise ValueError(
                    f"DAX formatting failed for the '{table_name}'[{obj_name}] calculation item: {errors}"
                )
            elif obj_type == "measures":
                raise ValueError(
                    f"DAX formatting failed for the '{obj_name}' measure: {errors}"
                )
            elif obj_type == "rls":
                raise ValueError(
                    f"DAX formatting failed for the row level security expression on the '{table_name}' table within the '{obj_name}' role: {errors}"
                )
            else:
                # BUG FIX: the original created NotImplementedError() without
                # raising it, silently ignoring errors for unknown object types.
                raise NotImplementedError(
                    f"DAX formatting failed for an unsupported object type '{obj_type}': {errors}"
                )
        else:
            # Strip the synthetic "x :=" assignment prefix added above.
            if formatted_dax.startswith("x :="):
                formatted_dax = formatted_dax[4:]
            formatted_dax = formatted_dax.strip()
            result.append(formatted_dax)
    return result
|
|
@@ -642,49 +642,6 @@ def list_lakehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
|
|
|
642
642
|
return df
|
|
643
643
|
|
|
644
644
|
|
|
645
|
-
def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the SQL endpoints within a workspace.

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the SQL endpoints within a workspace.
    """
    # Start from an empty, correctly-typed frame and append one row per endpoint.
    df = _create_dataframe(
        columns={
            "SQL Endpoint Id": "string",
            "SQL Endpoint Name": "string",
            "Description": "string",
        }
    )

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    pages = _base_api(
        request=f"/v1/workspaces/{workspace_id}/sqlEndpoints", uses_pagination=True
    )

    for page in pages:
        for endpoint in page.get("value", []):
            row = {
                "SQL Endpoint Id": endpoint.get("id"),
                "SQL Endpoint Name": endpoint.get("displayName"),
                "Description": endpoint.get("description"),
            }
            df = pd.concat([df, pd.DataFrame(row, index=[0])], ignore_index=True)

    return df
|
|
686
|
-
|
|
687
|
-
|
|
688
645
|
def list_datamarts(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
|
|
689
646
|
"""
|
|
690
647
|
Shows the datamarts within a workspace.
|
|
@@ -159,6 +159,7 @@ def import_notebook_from_web(
|
|
|
159
159
|
notebook_content=response.content,
|
|
160
160
|
workspace=workspace_id,
|
|
161
161
|
description=description,
|
|
162
|
+
format="ipynb",
|
|
162
163
|
)
|
|
163
164
|
elif len(dfI_filt) > 0 and overwrite:
|
|
164
165
|
print(f"{icons.info} Overwrite of notebooks is currently not supported.")
|
|
@@ -202,9 +203,8 @@ def create_notebook(
|
|
|
202
203
|
otherwise notebook_content should be GIT friendly format
|
|
203
204
|
"""
|
|
204
205
|
|
|
205
|
-
notebook_payload = base64.b64encode(notebook_content.
|
|
206
|
-
|
|
207
|
-
)
|
|
206
|
+
notebook_payload = base64.b64encode(notebook_content).decode("utf-8")
|
|
207
|
+
|
|
208
208
|
definition_payload = {
|
|
209
209
|
"parts": [
|
|
210
210
|
{
|
|
@@ -8,6 +8,8 @@ from sempy_labs._helper_functions import (
|
|
|
8
8
|
resolve_workspace_name_and_id,
|
|
9
9
|
resolve_dataset_name_and_id,
|
|
10
10
|
delete_item,
|
|
11
|
+
resolve_dataset_id,
|
|
12
|
+
resolve_workspace_id,
|
|
11
13
|
)
|
|
12
14
|
import sempy_labs._icons as icons
|
|
13
15
|
import re
|
|
@@ -227,3 +229,102 @@ def update_semantic_model_refresh_schedule(
|
|
|
227
229
|
print(
|
|
228
230
|
f"{icons.green_dot} Refresh schedule for the '{dataset_name}' within the '{workspace_name}' workspace has been updated."
|
|
229
231
|
)
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
def list_semantic_model_datasources(
    dataset: str | UUID,
    workspace: Optional[str | UUID] = None,
    expand_details: bool = True,
) -> pd.DataFrame:
    """
    Lists the data sources for the specified semantic model.

    This is a wrapper function for the following API: `Datasets - Get Datasources In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-datasources-in-group>`_.

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    expand_details : bool, default=True
        If True, expands the connection details for each data source.

    Returns
    -------
    pandas.DataFrame
        DataFrame containing the data sources for the specified semantic model.
    """
    workspace_id = resolve_workspace_id(workspace)
    dataset_id = resolve_dataset_id(dataset, workspace_id)

    # Output column -> key within the API's 'connectionDetails' object.
    detail_fields = {
        "Connection Server": "server",
        "Connection Database": "database",
        "Connection Path": "path",
        "Connection Account": "account",
        "Connection Domain": "domain",
        "Connection Kind": "kind",
        "Connection Email Address": "emailAddress",
        "Connection URL": "url",
        "Connection Class Info": "classInfo",
        "Connection Login Server": "loginServer",
    }

    if expand_details:
        columns = {"Datasource Type": "str"}
        columns.update({name: "str" for name in detail_fields})
        columns.update({"Datasource Id": "str", "Gateway Id": "str"})
    else:
        columns = {
            "Datasource Type": "str",
            "Connection Details": "str",
            "Datasource Id": "str",
            "Gateway Id": "str",
        }

    df = _create_dataframe(columns)

    response = _base_api(
        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/datasources",
        client="fabric_sp",
    )

    frames = []
    for item in response.json().get("value", []):
        details = item.get("connectionDetails", {})
        row = {"Datasource Type": item.get("datasourceType")}
        if expand_details:
            # One flat column per connection-detail attribute.
            row.update({col: details.get(key) for col, key in detail_fields.items()})
        else:
            # Keep the raw connectionDetails object in a single column.
            row["Connection Details"] = details
        row["Datasource Id"] = item.get("datasourceId")
        row["Gateway Id"] = item.get("gatewayId")
        frames.append(pd.DataFrame([row]))

    if frames:
        df = pd.concat(frames, ignore_index=True)

    return df
|
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
from typing import Optional, Literal
|
|
2
|
+
from uuid import UUID
|
|
3
|
+
import pandas as pd
|
|
4
|
+
from sempy_labs._helper_functions import (
|
|
5
|
+
_base_api,
|
|
6
|
+
_create_dataframe,
|
|
7
|
+
resolve_workspace_name_and_id,
|
|
8
|
+
resolve_item_name_and_id,
|
|
9
|
+
_update_dataframe_datatypes,
|
|
10
|
+
)
|
|
11
|
+
import sempy_labs._icons as icons
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def list_sql_endpoints(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the SQL endpoints within a workspace.

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the SQL endpoints within a workspace.
    """
    schema = {
        "SQL Endpoint Id": "string",
        "SQL Endpoint Name": "string",
        "Description": "string",
    }
    df = _create_dataframe(columns=schema)

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    # The endpoint is paginated; _base_api returns one response object per page.
    pages = _base_api(
        request=f"/v1/workspaces/{workspace_id}/sqlEndpoints", uses_pagination=True
    )

    for page in pages:
        for entry in page.get("value", []):
            record = {
                "SQL Endpoint Id": entry.get("id"),
                "SQL Endpoint Name": entry.get("displayName"),
                "Description": entry.get("description"),
            }
            df = pd.concat([df, pd.DataFrame(record, index=[0])], ignore_index=True)

    return df
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def refresh_sql_endpoint_metadata(
    item: str | UUID,
    type: Literal["Lakehouse", "MirroredDatabase"],
    workspace: Optional[str | UUID] = None,
    tables: Optional[dict[str, list[str]]] = None,
) -> pd.DataFrame:
    """
    Refreshes the metadata of a SQL endpoint.

    This is a wrapper function for the following API: `Items - Refresh Sql Endpoint Metadata <https://learn.microsoft.com/rest/api/fabric/sqlendpoint/items/refresh-sql-endpoint-metadata>`_.

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Lakehouse or MirroredDatabase).
    type : Literal['Lakehouse', 'MirroredDatabase']
        The type of the item. Must be 'Lakehouse' or 'MirroredDatabase'.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    tables : dict[str, list[str]], default=None
        A dictionary where the keys are schema names and the values are lists of table names.
        If empty, all table metadata will be refreshed.

        Example:
        {
            "dbo": ["DimDate", "DimGeography"],
            "sls": ["FactSales", "FactBudget"],
        }

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the status of the metadata refresh operation.
    """
    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    # Both item types expose the SQL endpoint id at the same property path;
    # only the REST endpoint segment differs, so use a lookup instead of
    # duplicating the two request branches.
    endpoint_segments = {
        "Lakehouse": "lakehouses",
        "MirroredDatabase": "mirroredDatabases",
    }
    if type not in endpoint_segments:
        raise ValueError("Invalid type. Must be 'Lakehouse' or 'MirroredDatabase'.")

    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/{endpoint_segments[type]}/{item_id}",
        client="fabric_sp",
    )
    sql_endpoint_id = (
        response.json()
        .get("properties", {})
        .get("sqlEndpointProperties", {})
        .get("id")
    )

    payload = {}
    if tables:
        # Loop variables renamed: the original reused 'tables' as the loop
        # variable, shadowing the parameter inside the comprehension.
        payload = {
            "tableDefinitions": [
                {"schema": schema_name, "tableNames": table_names}
                for schema_name, table_names in tables.items()
            ]
        }

    # Leading '/' added to the request path for consistency with every other
    # _base_api call in this module.
    result = _base_api(
        request=f"/v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata?preview=true",
        method="post",
        status_codes=[200, 202],
        lro_return_json=True,
        payload=payload,
    )

    columns = {
        "Table Name": "string",
        "Status": "string",
        "Start Time": "datetime",
        "End Time": "datetime",
        "Last Successful Sync Time": "datetime",
        "Error Code": "string",
        "Error Message": "string",
    }

    df = pd.json_normalize(result)

    # Extract error code and message, set to None if no error
    df["Error Code"] = df.get("error.errorCode", None)
    df["Error Message"] = df.get("error.message", None)

    # Friendly column renaming
    df.rename(
        columns={
            "tableName": "Table Name",
            "startDateTime": "Start Time",
            "endDateTime": "End Time",
            "status": "Status",
            "lastSuccessfulSyncDateTime": "Last Successful Sync Time",
        },
        inplace=True,
    )

    # Drop the original 'error' column if present
    df.drop(columns=[col for col in ["error"] if col in df.columns], inplace=True)

    column_order = [
        "Table Name",
        "Status",
        "Start Time",
        "End Time",
        "Last Successful Sync Time",
        "Error Code",
        "Error Message",
    ]
    df = df[column_order]

    _update_dataframe_datatypes(df, columns)

    printout = f"{icons.green_dot} The metadata of the SQL endpoint for the '{item_name}' {type.lower()} within the '{workspace_name}' workspace has been refreshed"
    if tables:
        print(f"{printout} for the following tables: {tables}.")
    else:
        print(f"{printout} for all tables.")

    return df
|