semantic-link-labs 0.9.5__tar.gz → 0.9.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/PKG-INFO +3 -2
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/README.md +2 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/docs/source/conf.py +1 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Service Principal.ipynb +1 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/pyproject.toml +1 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/semantic_link_labs.egg-info/PKG-INFO +3 -2
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/semantic_link_labs.egg-info/SOURCES.txt +1 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/__init__.py +15 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_capacities.py +37 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_capacity_migration.py +11 -14
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_connections.py +2 -4
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_dataflows.py +2 -2
- semantic_link_labs-0.9.6/src/sempy_labs/_dax_query_view.py +55 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_delta_analyzer.py +16 -14
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_environments.py +8 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_eventhouses.py +5 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_external_data_shares.py +4 -10
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_generate_semantic_model.py +2 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_graphQL.py +5 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_helper_functions.py +272 -51
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_kql_databases.py +5 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_list_functions.py +5 -37
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_managed_private_endpoints.py +9 -2
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_mirrored_databases.py +3 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_ml_experiments.py +1 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_model_bpa.py +2 -11
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_model_bpa_bulk.py +23 -27
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_one_lake_integration.py +2 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_semantic_models.py +20 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_sql.py +6 -2
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_sqldatabase.py +61 -100
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_vertipaq.py +8 -11
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_warehouses.py +14 -3
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_workspace_identity.py +6 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_workspaces.py +42 -2
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_basic_functions.py +3 -2
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_scanner.py +2 -2
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_directlake_schema_compare.py +2 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_directlake_schema_sync.py +65 -19
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_dl_helper.py +0 -6
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_generate_shared_expression.py +10 -11
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_guardrails.py +2 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_update_directlake_partition_entity.py +2 -2
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/lakehouse/_shortcuts.py +7 -5
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/_migration_validation.py +0 -4
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_download_report.py +4 -6
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_generate_report.py +6 -6
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_report_functions.py +2 -1
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_report_rebind.py +8 -6
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/tests/test_tom.py +2 -3
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/.github/ISSUE_TEMPLATE/bug_report.md +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/.github/ISSUE_TEMPLATE/feature_request.md +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/.github/ISSUE_TEMPLATE/issue--question---advice-needed.md +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/.github/workflows/build.yaml +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/.github/workflows/codeql.yaml +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/.gitignore +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/.readthedocs.yaml +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/.vscode/settings.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/CODE_OF_CONDUCT.md +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/LICENSE +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/SECURITY.md +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/SUPPORT.md +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/docs/Makefile +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/docs/make.bat +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/docs/requirements.txt +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/docs/source/index.rst +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/docs/source/modules.rst +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/environment.yml +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Best Practice Analyzer Report.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Capacity Migration.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Delta Analyzer.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Migration to Direct Lake.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Model Optimization.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Query Scale Out.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Report Analysis.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/SQL.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Semantic Model Management.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Semantic Model Refresh.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/notebooks/Tabular Object Model.ipynb +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/setup.cfg +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/semantic_link_labs.egg-info/dependency_links.txt +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/semantic_link_labs.egg-info/requires.txt +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/semantic_link_labs.egg-info/top_level.txt +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_ai.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_authentication.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_am-ET.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_da-DK.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_de-DE.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_el-GR.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_es-ES.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_he-IL.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_id-ID.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_is-IS.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_it-IT.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_te-IN.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_th-TH.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_clear_cache.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_dashboards.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_data_pipelines.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_dax.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_deployment_pipelines.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_documentation.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_eventstreams.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_gateways.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_git.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_icons.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_job_scheduler.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_kql_querysets.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_mirrored_warehouses.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_ml_models.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_model_auto_build.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_model_bpa_rules.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_model_dependencies.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_mounted_data_factories.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_notebooks.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_query_scale_out.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_refresh_semantic_model.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_spark.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_translations.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_utils.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_workloads.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/__init__.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_activities.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_apps.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_artifacts.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_capacities.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_datasets.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_domains.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_external_data_share.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_git.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_items.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_reports.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_shared.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_tenant.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_users.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/admin/_workspaces.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/__init__.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_get_directlake_lakehouse.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_get_shared_expression.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/directlake/_warm_cache.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/graph/__init__.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/graph/_groups.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/graph/_teams.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/graph/_users.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/lakehouse/__init__.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/lakehouse/_get_lakehouse_columns.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/lakehouse/_get_lakehouse_tables.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/lakehouse/_lakehouse.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/__init__.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/_create_pqt_file.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/_direct_lake_to_import.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/migration/_refresh_calc_tables.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_BPAReportTemplate.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/__init__.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/.platform +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/report.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition/version.json +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_bpareporttemplate/definition.pbir +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_export_report.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_paginated.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_report_bpa.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_report_bpa_rules.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_report_helper.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_report_list_functions.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/report/_reportwrapper.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/tom/__init__.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/tom/_model.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/tests/__init__.py +0 -0
- {semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/tests/test_friendly_case.py +0 -0
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: semantic-link-labs
-Version: 0.9.5
+Version: 0.9.6
 Summary: Semantic Link Labs for Microsoft Fabric
 Author: Microsoft Corporation
 License: MIT License
@@ -26,7 +26,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
 # Semantic Link Labs
 
 [](https://badge.fury.io/py/semantic-link-labs)
-[](https://readthedocs.org/projects/semantic-link-labs/)
+[](https://readthedocs.org/projects/semantic-link-labs/)
 [](https://github.com/psf/black)
 [](https://pepy.tech/project/semantic-link-labs)
 
@@ -148,6 +148,7 @@ An even better way to ensure the semantic-link-labs library is available in your
 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
 
 ## Version History
+* [0.9.6](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.6) (March 12, 2025)
 * [0.9.5](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.5) (March 7, 2025)
 * [0.9.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.4) (February 27, 2025)
 * [0.9.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.3) (February 13, 2025)
```

(The badge images inside the README links were not preserved in this rendering; only the link targets are shown.)
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/README.md

```diff
@@ -1,7 +1,7 @@
 # Semantic Link Labs
 
 [](https://badge.fury.io/py/semantic-link-labs)
-[](https://readthedocs.org/projects/semantic-link-labs/)
+[](https://readthedocs.org/projects/semantic-link-labs/)
 [](https://github.com/psf/black)
 [](https://pepy.tech/project/semantic-link-labs)
 
@@ -123,6 +123,7 @@ An even better way to ensure the semantic-link-labs library is available in your
 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
 
 ## Version History
+* [0.9.6](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.6) (March 12, 2025)
 * [0.9.5](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.5) (March 7, 2025)
 * [0.9.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.4) (February 27, 2025)
 * [0.9.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.3) (February 13, 2025)
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/docs/source/conf.py

```diff
@@ -13,7 +13,7 @@ sys.path.insert(0, os.path.abspath('../../src/'))
 project = 'semantic-link-labs'
 copyright = '2024, Microsoft and community'
 author = 'Microsoft and community'
-release = '0.9.5'
+release = '0.9.6'
 
 # -- General configuration ---------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/semantic_link_labs.egg-info/PKG-INFO
RENAMED

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: semantic-link-labs
-Version: 0.9.5
+Version: 0.9.6
 Summary: Semantic Link Labs for Microsoft Fabric
 Author: Microsoft Corporation
 License: MIT License
@@ -26,7 +26,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
 # Semantic Link Labs
 
 [](https://badge.fury.io/py/semantic-link-labs)
-[](https://readthedocs.org/projects/semantic-link-labs/)
+[](https://readthedocs.org/projects/semantic-link-labs/)
 [](https://github.com/psf/black)
 [](https://pepy.tech/project/semantic-link-labs)
 
@@ -148,6 +148,7 @@ An even better way to ensure the semantic-link-labs library is available in your
 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
 
 ## Version History
+* [0.9.6](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.6) (March 12, 2025)
 * [0.9.5](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.5) (March 7, 2025)
 * [0.9.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.4) (February 27, 2025)
 * [0.9.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.3) (February 13, 2025)
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/semantic_link_labs.egg-info/SOURCES.txt
RENAMED

```diff
@@ -47,6 +47,7 @@ src/sempy_labs/_dashboards.py
 src/sempy_labs/_data_pipelines.py
 src/sempy_labs/_dataflows.py
 src/sempy_labs/_dax.py
+src/sempy_labs/_dax_query_view.py
 src/sempy_labs/_delta_analyzer.py
 src/sempy_labs/_deployment_pipelines.py
 src/sempy_labs/_documentation.py
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/__init__.py

```diff
@@ -1,3 +1,6 @@
+from sempy_labs._dax_query_view import (
+    generate_dax_query_view_url,
+)
 from sempy_labs._mounted_data_factories import (
     list_mounted_data_factories,
     get_mounted_data_factory_definition,
@@ -7,6 +10,7 @@ from sempy_labs._mounted_data_factories import (
 from sempy_labs._semantic_models import (
     get_semantic_model_refresh_schedule,
     enable_semantic_model_scheduled_refresh,
+    delete_semantic_model,
 )
 from sempy_labs._graphQL import (
     list_graphql_apis,
@@ -148,6 +152,7 @@ from sempy_labs._capacities import (
     create_or_update_resource_group,
     list_resource_groups,
     get_resource_group,
+    list_capacities,
 )
 from sempy_labs._spark import (
     get_spark_settings,
@@ -165,6 +170,7 @@ from sempy_labs._workspaces import (
     assign_workspace_to_capacity,
     unassign_workspace_from_capacity,
     list_workspace_role_assignments,
+    delete_workspace,
 )
 from sempy_labs._notebooks import (
     get_notebook_definition,
@@ -180,6 +186,9 @@ from sempy_labs._sql import (
 from sempy_labs._sqldatabase import (
     get_sql_database_columns,
     get_sql_database_tables,
+    create_sql_database,
+    delete_sql_database,
+    list_sql_databases,
 )
 from sempy_labs._workspace_identity import (
     provision_workspace_identity,
@@ -244,7 +253,6 @@ from sempy_labs._list_functions import (
     list_semantic_model_objects,
     list_shortcuts,
     get_object_level_security,
-    list_capacities,
     list_datamarts,
     list_lakehouses,
     list_sql_endpoints,
@@ -540,4 +548,10 @@ __all__ = [
     "list_mounted_data_factories",
     "get_mounted_data_factory_definition",
     "delete_mounted_data_factory",
+    "generate_dax_query_view_url",
+    "delete_semantic_model",
+    "delete_workspace",
+    "create_sql_database",
+    "delete_sql_database",
+    "list_sql_databases",
 ]
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_capacities.py

```diff
@@ -242,7 +242,7 @@ def list_vcores() -> pd.DataFrame:
 
 def get_capacity_resource_governance(capacity_name: str):
 
-    dfC = 
+    dfC = list_capacities()
     dfC_filt = dfC[dfC["Display Name"] == capacity_name]
     capacity_id = dfC_filt["Id"].iloc[0].upper()
 
@@ -1131,3 +1131,39 @@ def get_resource_group(azure_subscription_id: str, resource_group: str) -> pd.Da
     }
 
     return pd.DataFrame(new_data, index=[0])
+
+
+def list_capacities() -> pd.DataFrame:
+    """
+    Shows the capacities and their properties.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the capacities and their properties
+    """
+
+    columns = {
+        "Id": "string",
+        "Display Name": "string",
+        "Sku": "string",
+        "Region": "string",
+        "State": "string",
+        "Admins": "string",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response = _base_api(request="/v1.0/myorg/capacities", client="fabric_sp")
+
+    for i in response.json().get("value", []):
+        new_data = {
+            "Id": i.get("id").lower(),
+            "Display Name": i.get("displayName"),
+            "Sku": i.get("sku"),
+            "Region": i.get("region"),
+            "State": i.get("state"),
+            "Admins": [i.get("admins", [])],
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
```
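The relocated `list_capacities` helper is now re-exported from the package root (see the `__init__.py` changes above), so existing notebook code can keep calling it the same way. A minimal usage sketch, assuming it runs in a Fabric notebook where `semantic-link-labs` is installed; the filter value is illustrative:

```python
import sempy_labs as labs

# list_capacities() now lives in sempy_labs._capacities and calls the
# /v1.0/myorg/capacities endpoint (per the diff, via the "fabric_sp" client,
# so service principal authentication is supported).
dfC = labs.list_capacities()

# Columns produced by the new implementation:
# Id, Display Name, Sku, Region, State, Admins
active = dfC[dfC["State"] == "Active"]
print(active[["Display Name", "Sku", "Region"]])
```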
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_capacity_migration.py

```diff
@@ -106,17 +106,13 @@ def migrate_workspaces(
     migrated_workspaces = []
 
     for i, r in dfW.iterrows():
-        if workspaces is None or
-            if assign_workspace_to_capacity(
-                capacity_name=target_capacity, workspace=workspace
-            ):
-                migrated_workspaces.append(workspace)
+        workspace_id = r["Id"]
+        workspace_name = r["Name"]
+        if workspaces is None or workspace_name in workspaces:
+            assign_workspace_to_capacity(
+                capacity=target_capacity, workspace=workspace_id
+            )
+            migrated_workspaces.append(workspace_name)
 
     if len(migrated_workspaces) < workspace_count:
         print(
@@ -124,10 +120,11 @@ def migrate_workspaces(
         )
         print(f"{icons.in_progress} Initiating rollback...")
         for i, r in dfW.iterrows():
+            workspace_id = r["Id"]
+            workspace_name = r["Name"]
+            if workspace_name in migrated_workspaces:
                 assign_workspace_to_capacity(
+                    capacity=source_capacity, workspace=workspace_id
                 )
         print(
             f"{icons.green_dot} Rollback of the workspaces to the '{source_capacity}' capacity is complete."
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_connections.py

```diff
@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
+    resolve_item_id,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -230,9 +230,7 @@ def list_item_connections(
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_type = item_type[0].upper() + item_type[1:]
-    item_id = 
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     columns = {
         "Connection Name": "string",
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_dataflows.py

```diff
@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 import pandas as pd
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
@@ -6,6 +5,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
+    resolve_workspace_name,
 )
 from typing import Optional, Tuple
 import sempy_labs._icons as icons
@@ -187,7 +187,7 @@ def list_upstream_dataflows(
     for v in values:
         tgt_dataflow_id = v.get("targetDataflowId")
         tgt_workspace_id = v.get("groupId")
-        tgt_workspace_name = 
+        tgt_workspace_name = resolve_workspace_name(workspace_id=tgt_workspace_id)
         (tgt_dataflow_name, _) = _resolve_dataflow_name_and_id(
             dataflow=tgt_dataflow_id, workspace=tgt_workspace_id
         )
```
semantic_link_labs-0.9.6/src/sempy_labs/_dax_query_view.py

```diff
@@ -0,0 +1,55 @@
+from typing import Optional
+from uuid import UUID
+from sempy_labs._helper_functions import (
+    resolve_dataset_id,
+    _get_fabric_context_setting,
+    resolve_workspace_id,
+)
+import gzip
+import base64
+import urllib.parse
+
+
+def generate_dax_query_view_url(
+    dataset: str | UUID, dax_string: str, workspace: Optional[str | UUID] = None
+):
+    """
+    Prints a URL based on query provided. This URL opens `DAX query view <https://learn.microsoft.com/power-bi/transform-model/dax-query-view>`_ in the Power BI service, connected to the semantic model and using the query provided.
+
+    Parameters
+    ----------
+    dataset : str | uuid.UUID
+        The semantic model name or ID.
+    dax_string : str
+        The DAX query string.
+    workspace : str | uuid.UUID, default=None
+        The workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    workspace_id = resolve_workspace_id(workspace=workspace)
+    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace_id)
+
+    prefix = _get_fabric_context_setting(name="spark.trident.pbienv").lower()
+
+    if prefix == "prod":
+        prefix = "app"
+
+    def gzip_base64_urlsafe(input_string):
+        # Compress the string with gzip
+        compressed_data = gzip.compress(input_string.encode("utf-8"))
+
+        # Encode the compressed data in base64
+        base64_data = base64.b64encode(compressed_data)
+
+        # Make the base64 string URL-safe
+        urlsafe_data = urllib.parse.quote_plus(base64_data.decode("utf-8"))
+
+        return urlsafe_data
+
+    formatted_query = gzip_base64_urlsafe(dax_string)
+
+    url = f"https://{prefix}.powerbi.com/groups/{workspace_id}/modeling/{dataset_id}/daxQueryView?query={formatted_query}"
+
+    print(url)
```
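Based on the signature and docstring in the new module, a minimal call might look like the sketch below; the model name, workspace name, and DAX query are placeholders, and the function prints the deep link rather than returning it:

```python
import sempy_labs as labs

# Prints a powerbi.com URL that opens DAX query view for the model with the
# supplied query pre-loaded (the query is gzip-compressed and base64/URL-encoded).
labs.generate_dax_query_view_url(
    dataset="Sales Model",          # semantic model name or UUID (placeholder)
    dax_string="EVALUATE VALUES('Date'[Year])",
    workspace="My Workspace",       # optional; defaults to the notebook's workspace
)
```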
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_delta_analyzer.py

```diff
@@ -52,6 +52,7 @@ def delta_analyzer(
     workspace: Optional[str | UUID] = None,
     column_stats: bool = True,
     skip_cardinality: bool = True,
+    schema: Optional[str] = None,
 ) -> Dict[str, pd.DataFrame]:
     """
     Analyzes a delta table and shows the results in dictionary containing a set of 5 dataframes. If 'export' is set to True, the results will be saved to delta tables in the lakehouse attached to the notebook.
@@ -85,6 +86,8 @@ def delta_analyzer(
         If True, collects data about column chunks and columns. If False, skips that step and only returns the other 3 dataframes.
     skip_cardinality : bool, default=True
         If True, skips the cardinality calculation for each column. If False, calculates the cardinality for each column.
+    schema : str, default=None
+        The name of the schema to which the table belongs (for schema-enabled lakehouses). If None, the default schema is used.
 
     Returns
     -------
@@ -96,25 +99,21 @@ def delta_analyzer(
     if not skip_cardinality:
         column_stats = True
 
-    # display_toggle = notebookutils.common.configs.pandas_display
-
-    # Turn off notebookutils display
-    # if display_toggle is True:
-    #     notebookutils.common.configs.pandas_display = False
-
     prefix = "SLL_DeltaAnalyzer_"
     now = datetime.now()
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace=workspace)
     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
         lakehouse=lakehouse, workspace=workspace
     )
-    path = create_abfss_path(lakehouse_id, workspace_id, table_name)
-    local_path = _mount(lakehouse=lakehouse, workspace=workspace)
-    table_path = f"{local_path}/Tables/{table_name}"
-    delta_table_path = create_abfss_path(lakehouse_id, workspace_id, table_name)
 
-
-
+    delta_table_path = create_abfss_path(
+        lakehouse_id, workspace_id, table_name, schema=schema
+    )
+    local_path = _mount(lakehouse=lakehouse, workspace=workspace)
+    if schema is not None:
+        table_path = f"{local_path}/Tables/{schema}/{table_name}"
+    else:
+        table_path = f"{local_path}/Tables/{table_name}"
 
     parquet_file_df_columns = {
         # "Dataset": "string",
@@ -183,7 +182,7 @@ def delta_analyzer(
     # min_reader_version = table_details.get("minReaderVersion")
     # min_writer_version = table_details.get("minWriterVersion")
 
-    latest_files = _read_delta_table(
+    latest_files = _read_delta_table(delta_table_path).inputFiles()
     # file_paths = [f.split("/")[-1] for f in latest_files]
     all_parquet_files = get_parquet_file_infos(delta_table_path)
     common_file_paths = set(
@@ -430,6 +429,7 @@ def get_delta_table_history(
     table_name: str,
     lakehouse: Optional[str | UUID] = None,
     workspace: Optional[str | UUID] = None,
+    schema: Optional[str] = None,
 ) -> pd.DataFrame:
     """
     Returns the history of a delta table as a pandas dataframe.
@@ -445,6 +445,8 @@ def get_delta_table_history(
         The Fabric workspace name or ID used by the lakehouse.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    schema : str, default=None
+        The name of the schema to which the table belongs (for schema-enabled lakehouses). If None, the default schema is used.
 
     Returns
     -------
@@ -461,7 +463,7 @@ def get_delta_table_history(
     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
         lakehouse=lakehouse, workspace=workspace
     )
-    path = create_abfss_path(lakehouse_id, workspace_id, table_name)
+    path = create_abfss_path(lakehouse_id, workspace_id, table_name, schema)
 
     from delta import DeltaTable
 
```
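A short sketch of the new `schema` parameter, assuming a schema-enabled lakehouse is attached to the notebook and that both functions remain exported from the package root as in 0.9.5 (table and schema names are placeholders):

```python
import sempy_labs as labs

# With schema set, paths resolve to Tables/<schema>/<table> instead of Tables/<table>.
results = labs.delta_analyzer(table_name="fact_sales", schema="dbo")
history = labs.get_delta_table_history(table_name="fact_sales", schema="dbo")

print(history.head())
```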
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_environments.py

```diff
@@ -48,6 +48,8 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List Environments <https://learn.microsoft.com/rest/api/fabric/environment/items/list-environments>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -71,7 +73,9 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/environments",
+        request=f"/v1/workspaces/{workspace_id}/environments",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -113,6 +117,8 @@ def publish_environment(
 
     This is a wrapper function for the following API: `Spark Libraries - Publish Environment <https://learn.microsoft.com/rest/api/fabric/environment/spark-libraries/publish-environment>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     environment: str | uuid.UUID
@@ -133,6 +139,7 @@ def publish_environment(
         method="post",
         lro_return_status_code=True,
         status_codes=None,
+        client="fabric_sp",
     )
 
     print(
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_eventhouses.py

```diff
@@ -72,6 +72,8 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List Eventhouses <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventhouses>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -95,7 +97,9 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/eventhouses",
+        request=f"/v1/workspaces/{workspace_id}/eventhouses",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_external_data_shares.py
RENAMED

```diff
@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 from uuid import UUID
 import pandas as pd
 from typing import Optional, List
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
+    resolve_item_id,
 )
 
 
@@ -39,9 +39,7 @@ def create_external_data_share(
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = 
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     if isinstance(paths, str):
         paths = [paths]
@@ -85,9 +83,7 @@ def revoke_external_data_share(
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = 
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke",
@@ -124,9 +120,7 @@ def list_external_data_shares_in_item(
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = 
-        item_name=item_name, type=item_type, workspace=workspace_id
-    )
+    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
 
     columns = {
         "External Data Share Id": "string",
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_generate_semantic_model.py
RENAMED

```diff
@@ -11,6 +11,7 @@ from sempy_labs._helper_functions import (
     _decode_b64,
     _base_api,
     _mount,
+    resolve_workspace_id,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
@@ -285,7 +286,7 @@ def deploy_semantic_model(
 
     if target_workspace is None:
         target_workspace_name = source_workspace_name
-        target_workspace_id = 
+        target_workspace_id = resolve_workspace_id(workspace=target_workspace_name)
     else:
         (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
             target_workspace
```
{semantic_link_labs-0.9.5 → semantic_link_labs-0.9.6}/src/sempy_labs/_graphQL.py

```diff
@@ -15,6 +15,8 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List GraphQLApis <https://learn.microsoft.com/rest/api/fabric/graphqlapi/items/list-graphqlapi-s>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -38,7 +40,9 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
+        request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
+        uses_pagination=True,
+        client="fabric_sp",
    )
 
     for r in responses:
```