msfabricpysdkcore 0.0.9__py3-none-any.whl → 0.0.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. msfabricpysdkcore/admin_item.py +7 -0
  2. msfabricpysdkcore/admin_workspace.py +20 -1
  3. msfabricpysdkcore/adminapi.py +133 -7
  4. msfabricpysdkcore/auth.py +9 -6
  5. msfabricpysdkcore/client.py +5 -4
  6. msfabricpysdkcore/coreapi.py +341 -17
  7. msfabricpysdkcore/deployment_pipeline.py +240 -0
  8. msfabricpysdkcore/environment.py +209 -0
  9. msfabricpysdkcore/item.py +12 -11
  10. msfabricpysdkcore/lakehouse.py +42 -1
  11. msfabricpysdkcore/long_running_operation.py +2 -6
  12. msfabricpysdkcore/otheritems.py +122 -3
  13. msfabricpysdkcore/spark_custom_pool.py +118 -0
  14. msfabricpysdkcore/tests/test_admin_apis.py +20 -9
  15. msfabricpysdkcore/tests/test_datapipelines.py +48 -0
  16. msfabricpysdkcore/tests/test_deployment_pipeline.py +64 -0
  17. msfabricpysdkcore/tests/test_domains.py +3 -2
  18. msfabricpysdkcore/tests/test_environments.py +65 -0
  19. msfabricpysdkcore/tests/test_evenstreams.py +44 -0
  20. msfabricpysdkcore/tests/test_git.py +3 -1
  21. msfabricpysdkcore/tests/test_items_incl_lakehouse.py +81 -109
  22. msfabricpysdkcore/tests/test_jobs.py +4 -0
  23. msfabricpysdkcore/tests/test_kqldatabases.py +48 -0
  24. msfabricpysdkcore/tests/test_shortcuts.py +3 -1
  25. msfabricpysdkcore/tests/test_spark.py +91 -0
  26. msfabricpysdkcore/tests/test_workspaces_capacities.py +6 -5
  27. msfabricpysdkcore/workspace.py +358 -32
  28. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/METADATA +82 -32
  29. msfabricpysdkcore-0.0.11.dist-info/RECORD +38 -0
  30. msfabricpysdkcore-0.0.9.dist-info/RECORD +0 -29
  31. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/LICENSE +0 -0
  32. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/WHEEL +0 -0
  33. {msfabricpysdkcore-0.0.9.dist-info → msfabricpysdkcore-0.0.11.dist-info}/top_level.txt +0 -0
@@ -4,14 +4,16 @@ from time import sleep
4
4
 
5
5
  from msfabricpysdkcore.capacity import Capacity
6
6
  from msfabricpysdkcore.client import FabricClient
7
+ from msfabricpysdkcore.deployment_pipeline import DeploymentPipeline
8
+ from msfabricpysdkcore.long_running_operation import LongRunningOperation
7
9
  from msfabricpysdkcore.workspace import Workspace
8
10
 
9
11
  class FabricClientCore(FabricClient):
10
12
  """FabricClientCore class to interact with Fabric Core APIs"""
11
13
 
12
- def __init__(self, tenant_id = None, client_id = None, client_secret = None) -> None:
14
+ def __init__(self, tenant_id = None, client_id = None, client_secret = None, silent=False) -> None:
13
15
  """Initialize FabricClientCore object"""
14
- super().__init__(tenant_id, client_id, client_secret)
16
+ super().__init__(tenant_id, client_id, client_secret, silent=silent)
15
17
 
16
18
 
17
19
  def list_workspaces(self, continuationToken = None):
@@ -311,17 +313,18 @@ class FabricClientCore(FabricClient):
311
313
  return cap
312
314
  raise ValueError("No capacity found")
313
315
 
314
- def list_tables(self, workspace_id, item_id):
315
- ws = self.get_workspace_by_id(workspace_id)
316
- return ws.list_tables(item_id=item_id)
317
-
318
- def load_table(self, workspace_id, item_id, table_name, path_type, relative_path,
319
- file_extension = None, format_options = None,
320
- mode = None, recursive = None, wait_for_completion = True):
321
- ws = self.get_workspace_by_id(workspace_id)
322
- return ws.load_table(item_id, table_name, path_type, relative_path,
323
- file_extension, format_options,
324
- mode, recursive, wait_for_completion)
316
+
317
+ # long running operations
318
+
319
+ def get_operation_results(self, operation_id):
320
+ """Get the results of an operation"""
321
+ lro = LongRunningOperation(operation_id=operation_id, auth=self.auth)
322
+ return lro.get_operation_results()
323
+
324
+ def get_operation_state(self, operation_id):
325
+ """Get the state of an operation"""
326
+ lro = LongRunningOperation(operation_id=operation_id, auth=self.auth)
327
+ return lro.get_operation_state()
325
328
 
326
329
  # list things
327
330
 
@@ -347,6 +350,11 @@ class FabricClientCore(FabricClient):
347
350
 
348
351
  # dataPipelines
349
352
 
353
+ def create_data_pipeline(self, workspace_id, display_name, description = None):
354
+ """Create a data pipeline in a workspace"""
355
+ ws = self.get_workspace_by_id(workspace_id)
356
+ return ws.create_data_pipeline(display_name = display_name, description = description)
357
+
350
358
  def list_data_pipelines(self, workspace_id, with_properties = False):
351
359
  """List data pipelines in a workspace"""
352
360
  ws = self.get_workspace_by_id(workspace_id)
@@ -367,6 +375,119 @@ class FabricClientCore(FabricClient):
367
375
  ws = self.get_workspace_by_id(workspace_id)
368
376
  return ws.get_data_pipeline(data_pipeline_id).update(display_name=display_name, description=description)
369
377
 
378
+ # environments
379
+
380
+ def list_environments(self, workspace_id, with_properties = False):
381
+ """List environments in a workspace"""
382
+ ws = self.get_workspace_by_id(workspace_id)
383
+ return ws.list_environments(with_properties = with_properties)
384
+
385
+ def create_environment(self, workspace_id, display_name, description = None):
386
+ """Create an environment in a workspace"""
387
+ ws = self.get_workspace_by_id(workspace_id)
388
+ return ws.create_environment(display_name = display_name, description = description)
389
+
390
+ def get_environment(self, workspace_id, environment_id = None, environment_name = None):
391
+ """Get an environment from a workspace"""
392
+ ws = self.get_workspace_by_id(workspace_id)
393
+ return ws.get_environment(environment_id = environment_id, environment_name = environment_name)
394
+
395
+ def delete_environment(self, workspace_id, environment_id):
396
+ """Delete an environment from a workspace"""
397
+ ws = self.get_workspace_by_id(workspace_id)
398
+ return ws.delete_environment(environment_id)
399
+
400
+ def update_environment(self, workspace_id, environment_id, display_name = None, description = None):
401
+ """Update an environment in a workspace"""
402
+ ws = self.get_workspace_by_id(workspace_id)
403
+ return ws.update_environment(environment_id, display_name = display_name, description = description)
404
+
405
+ # environmentSparkCompute
406
+
407
+ def get_published_settings(self, workspace_id, environment_id):
408
+ """Get published settings for an environment"""
409
+ ws = self.get_workspace_by_id(workspace_id)
410
+ return ws.get_environment(environment_id).get_published_settings()
411
+
412
+ def get_staging_settings(self, workspace_id, environment_id):
413
+ """Get staging settings for an environment"""
414
+ ws = self.get_workspace_by_id(workspace_id)
415
+ return ws.get_environment(environment_id).get_staging_settings()
416
+
417
+ def update_staging_settings(self, workspace_id, environment_id,
418
+ driver_cores = None, driver_memory = None, dynamic_executor_allocation = None,
419
+ executor_cores = None, executor_memory = None, instance_pool = None,
420
+ runtime_version = None, spark_properties = None):
421
+
422
+ return self.get_environment(workspace_id, environment_id).update_staging_settings(driver_cores=driver_cores,
423
+ driver_memory=driver_memory,
424
+ dynamic_executor_allocation=dynamic_executor_allocation,
425
+ executor_cores=executor_cores,
426
+ executor_memory=executor_memory,
427
+ instance_pool=instance_pool,
428
+ runtime_version=runtime_version,
429
+ spark_properties=spark_properties)
430
+
431
+
432
+ # environmentSparkLibraries
433
+
434
+ def get_published_libraries(self, workspace_id, environment_id):
435
+ """Get published libraries for an environment"""
436
+ ws = self.get_workspace_by_id(workspace_id)
437
+ return ws.get_environment(environment_id).get_published_libraries()
438
+
439
+ def get_staging_libraries(self, workspace_id, environment_id):
440
+ """Get staging libraries for an environment"""
441
+ ws = self.get_workspace_by_id(workspace_id)
442
+ return ws.get_environment(environment_id).get_staging_libraries()
443
+
444
+ def upload_staging_library(self, workspace_id, environment_id, file_path):
445
+ """Update staging libraries for an environment"""
446
+ ws = self.get_workspace_by_id(workspace_id)
447
+ return ws.get_environment(environment_id).upload_staging_library(file_path=file_path)
448
+
449
+ def publish_environment(self, workspace_id, environment_id):
450
+ """Publish an environment"""
451
+ ws = self.get_workspace_by_id(workspace_id)
452
+ return ws.get_environment(environment_id).publish_environment()
453
+
454
+ def delete_staging_library(self, workspace_id, environment_id, library_to_delete):
455
+ """Delete a staging library from an environment"""
456
+ ws = self.get_workspace_by_id(workspace_id)
457
+ return ws.get_environment(environment_id).delete_staging_library(library_to_delete)
458
+
459
+ def cancel_publish(self, workspace_id, environment_id):
460
+ """Cancel publishing an environment"""
461
+ ws = self.get_workspace_by_id(workspace_id)
462
+ return ws.get_environment(environment_id).cancel_publish()
463
+
464
+ # eventhouses
465
+
466
+ def list_eventhouses(self, workspace_id):
467
+ """List eventhouses in a workspace"""
468
+ ws = self.get_workspace_by_id(workspace_id)
469
+ return ws.list_eventhouses()
470
+
471
+ def create_eventhouse(self, workspace_id, display_name, description = None):
472
+ """Create an eventhouse in a workspace"""
473
+ ws = self.get_workspace_by_id(workspace_id)
474
+ return ws.create_eventhouse(display_name = display_name, description = description)
475
+
476
+ def get_eventhouse(self, workspace_id, eventhouse_id = None, eventhouse_name = None):
477
+ """Get an eventhouse from a workspace"""
478
+ ws = self.get_workspace_by_id(workspace_id)
479
+ return ws.get_eventhouse(eventhouse_id = eventhouse_id, eventhouse_name = eventhouse_name)
480
+
481
+ def delete_eventhouse(self, workspace_id, eventhouse_id):
482
+ """Delete an eventhouse from a workspace"""
483
+ ws = self.get_workspace_by_id(workspace_id)
484
+ return ws.delete_eventhouse(eventhouse_id)
485
+
486
+ def update_eventhouse(self, workspace_id, eventhouse_id, display_name = None, description = None):
487
+ """Update an eventhouse in a workspace"""
488
+ ws = self.get_workspace_by_id(workspace_id)
489
+ return ws.update_eventhouse(eventhouse_id, display_name = display_name, description = description)
490
+
370
491
  # eventstreams
371
492
 
372
493
  def list_eventstreams(self, workspace_id):
@@ -400,6 +521,11 @@ class FabricClientCore(FabricClient):
400
521
  """List kql databases in a workspace"""
401
522
  ws = self.get_workspace_by_id(workspace_id)
402
523
  return ws.list_kql_databases()
524
+
525
+ def create_kql_database(self, workspace_id, creation_payload, display_name, description = None):
526
+ """Create a kql database in a workspace"""
527
+ ws = self.get_workspace_by_id(workspace_id)
528
+ return ws.create_kql_database(creation_payload = creation_payload, display_name = display_name, description = description)
403
529
 
404
530
  def get_kql_database(self, workspace_id, kql_database_id = None, kql_database_name = None):
405
531
  """Get a kql database from a workspace"""
@@ -465,6 +591,27 @@ class FabricClientCore(FabricClient):
465
591
  ws = self.get_workspace_by_id(workspace_id)
466
592
  return ws.get_lakehouse(lakehouse_id = lakehouse_id, lakehouse_name = lakehouse_name)
467
593
 
594
+ def list_tables(self, workspace_id, lakehouse_id):
595
+ ws = self.get_workspace_by_id(workspace_id)
596
+ return ws.list_tables(lakehouse_id=lakehouse_id)
597
+
598
+ def load_table(self, workspace_id, lakehouse_id, table_name, path_type, relative_path,
599
+ file_extension = None, format_options = None,
600
+ mode = None, recursive = None, wait_for_completion = True):
601
+ ws = self.get_workspace_by_id(workspace_id)
602
+ return ws.load_table(lakehouse_id, table_name, path_type, relative_path,
603
+ file_extension, format_options,
604
+ mode, recursive, wait_for_completion)
605
+
606
+ def run_on_demand_table_maintenance(self, workspace_id, lakehouse_id,
607
+ execution_data = None,
608
+ job_type = "TableMaintenance", wait_for_completion = True):
609
+ ws = self.get_workspace_by_id(workspace_id)
610
+ return ws.run_on_demand_table_maintenance(lakehouse_id = lakehouse_id,
611
+ execution_data = execution_data,
612
+ job_type = job_type,
613
+ wait_for_completion= wait_for_completion)
614
+
468
615
  # mlExperiments
469
616
 
470
617
  def list_ml_experiments(self, workspace_id):
@@ -546,6 +693,11 @@ class FabricClientCore(FabricClient):
546
693
  ws = self.get_workspace_by_id(workspace_id)
547
694
  return ws.update_notebook(notebook_id, display_name = display_name, description = description)
548
695
 
696
+ def get_notebook_definition(self, workspace_id, notebook_id, format = None):
697
+ """Get the definition of a notebook"""
698
+ ws = self.get_workspace_by_id(workspace_id)
699
+ return ws.get_notebook_definition(notebook_id, format = format)
700
+
549
701
  def update_notebook_definition(self, workspace_id, notebook_id, definition):
550
702
  """Update the definition of a notebook"""
551
703
  ws = self.get_workspace_by_id(workspace_id)
@@ -573,6 +725,11 @@ class FabricClientCore(FabricClient):
573
725
  ws = self.get_workspace_by_id(workspace_id)
574
726
  return ws.delete_report(report_id)
575
727
 
728
+ def get_report_definition(self, workspace_id, report_id, format = None):
729
+ """Get the definition of a report"""
730
+ ws = self.get_workspace_by_id(workspace_id)
731
+ return ws.get_report_definition(report_id, format = format)
732
+
576
733
  def update_report_definition(self, workspace_id, report_id, definition):
577
734
  """Update the definition of a report"""
578
735
  ws = self.get_workspace_by_id(workspace_id)
@@ -600,11 +757,16 @@ class FabricClientCore(FabricClient):
600
757
  ws = self.get_workspace_by_id(workspace_id)
601
758
  return ws.delete_semantic_model(semantic_model_id)
602
759
 
603
- def update_semantic_model(self, workspace_id, semantic_model_id, display_name = None, description = None):
604
- """Update a semantic model in a workspace"""
605
- ws = self.get_workspace_by_id(workspace_id)
606
- return ws.update_semantic_model(semantic_model_id, display_name = display_name, description = description)
760
+ # def update_semantic_model(self, workspace_id, semantic_model_id, display_name = None, description = None):
761
+ # """Update a semantic model in a workspace"""
762
+ # ws = self.get_workspace_by_id(workspace_id)
763
+ # return ws.update_semantic_model(semantic_model_id, display_name = display_name, description = description)
607
764
 
765
+ def get_semantic_model_definition(self, workspace_id, semantic_model_id, format = None):
766
+ """Get the definition of a semantic model"""
767
+ ws = self.get_workspace_by_id(workspace_id)
768
+ return ws.get_semantic_model_definition(semantic_model_id, format = format)
769
+
608
770
  def update_semantic_model_definition(self, workspace_id, semantic_model_id, definition):
609
771
  """Update the definition of a semantic model"""
610
772
  ws = self.get_workspace_by_id(workspace_id)
@@ -637,6 +799,11 @@ class FabricClientCore(FabricClient):
637
799
  ws = self.get_workspace_by_id(workspace_id)
638
800
  return ws.update_spark_job_definition(spark_job_definition_id, display_name = display_name, description = description)
639
801
 
802
+ def get_spark_job_definition_definition(self, workspace_id, spark_job_definition_id, format = None):
803
+ """Get the definition of a spark job definition"""
804
+ ws = self.get_workspace_by_id(workspace_id)
805
+ return ws.get_spark_job_definition_definition(spark_job_definition_id, format = format)
806
+
640
807
  def update_spark_job_definition_definition(self, workspace_id, spark_job_definition_id, definition):
641
808
  """Update the definition of a spark job definition"""
642
809
  ws = self.get_workspace_by_id(workspace_id)
@@ -668,3 +835,160 @@ class FabricClientCore(FabricClient):
668
835
  """Update a warehouse in a workspace"""
669
836
  ws = self.get_workspace_by_id(workspace_id)
670
837
  return ws.update_warehouse(warehouse_id, display_name = display_name, description = description)
838
+
839
+ # spark workspace custom pools
840
+
841
+ def list_workspace_custom_pools(self, workspace_id):
842
+ """List workspace custom pools"""
843
+ ws = self.get_workspace_by_id(workspace_id)
844
+ return ws.list_workspace_custom_pools()
845
+
846
+ def create_workspace_custom_pool(self, workspace_id, name, node_family, node_size, auto_scale, dynamic_executor_allocation):
847
+ """Create a workspace custom pool"""
848
+ ws = self.get_workspace_by_id(workspace_id)
849
+ return ws.create_workspace_custom_pool(name = name,
850
+ node_family = node_family,
851
+ node_size = node_size,
852
+ auto_scale = auto_scale,
853
+ dynamic_executor_allocation = dynamic_executor_allocation)
854
+
855
+
856
+ def get_workspace_custom_pool(self, workspace_id, pool_id):
857
+ """Get a workspace custom pool"""
858
+ ws = self.get_workspace_by_id(workspace_id)
859
+ return ws.get_workspace_custom_pool(pool_id)
860
+
861
+ def delete_workspace_custom_pool(self, workspace_id, pool_id):
862
+ """Delete a workspace custom pool"""
863
+ ws = self.get_workspace_by_id(workspace_id)
864
+ return ws.delete_workspace_custom_pool(pool_id)
865
+
866
+ def update_workspace_custom_pool(self, workspace_id, pool_id, name = None, node_family = None, node_size = None, auto_scale = None, dynamic_executor_allocation = None):
867
+ """Update a workspace custom pool"""
868
+ ws = self.get_workspace_by_id(workspace_id)
869
+ return ws.update_workspace_custom_pool(pool_id, name = name, node_family = node_family, node_size = node_size, auto_scale = auto_scale, dynamic_executor_allocation = dynamic_executor_allocation)
870
+
871
+ # Deployment Pipelines
872
+
873
+ def deploy_stage_content(self, deployment_pipeline_id, source_stage_id, target_stage_id, created_workspace_details = None,
874
+ items = None, note = None, wait_for_completion = True):
875
+ """Deploy stage content
876
+ Args:
877
+ deployment_pipeline_id (str): The ID of the deployment pipeline
878
+ source_stage_id (str): The ID of the source stage
879
+ target_stage_id (str): The ID of the target stage
880
+ created_workspace_details (list): A list of created workspace details
881
+ items (list): A list of items
882
+ note (str): A note
883
+ wait_for_completion (bool): Whether to wait for the deployment to complete
884
+ Returns:
885
+ Details about the deployment"""
886
+
887
+ pipeline = DeploymentPipeline.get_pipeline(deployment_pipeline_id, auth=self.auth)
888
+
889
+ return pipeline.deploy(source_stage_id, target_stage_id, created_workspace_details = created_workspace_details,
890
+ items = items, note = note, wait_for_completion = wait_for_completion)
891
+
892
+ def list_deployment_pipelines(self, continuationToken = None):
893
+ """List deployment pipelines
894
+ Args:
895
+ continuationToken (str): The continuation token for pagination
896
+ Returns:
897
+ list: List of DeploymentPipeline objects
898
+ """
899
+ # GET https://api.fabric.microsoft.com/v1/deploymentPipelines
900
+
901
+ url = "https://api.fabric.microsoft.com/v1/deploymentPipelines"
902
+
903
+ if continuationToken:
904
+ url = f"{url}?continuationToken={continuationToken}"
905
+
906
+ for _ in range(10):
907
+ response = requests.get(url=url, headers=self.auth.get_headers())
908
+ if response.status_code == 429:
909
+ print("Too many requests, waiting 10 seconds")
910
+ sleep(10)
911
+ continue
912
+ if response.status_code not in (200, 429):
913
+ raise Exception(f"Error listing deployment pipelines: {response.status_code}, {response.text}")
914
+ break
915
+
916
+ resp_dict = json.loads(response.text)
917
+ items = resp_dict["value"]
918
+
919
+ dep_pipes = [DeploymentPipeline.from_dict(i, auth=self.auth) for i in items]
920
+
921
+ if "continuationToken" in resp_dict:
922
+ dep_pipes_next = self.list_deployment_pipelines(continuationToken=resp_dict["continuationToken"])
923
+ dep_pipes.extend(dep_pipes_next)
924
+
925
+ return dep_pipes
926
+
927
+ def get_deployment_pipeline(self, pipeline_id):
928
+ """Get a deployment pipeline by ID
929
+ Args:
930
+ pipeline_id (str): The ID of the deployment pipeline
931
+ Returns:
932
+ DeploymentPipeline: The deployment pipeline
933
+ """
934
+ return DeploymentPipeline.get_pipeline(pipeline_id, auth=self.auth)
935
+
936
+ def get_deployment_pipeline_stages_items(self, pipeline_id, stage_id = None, stage_name = None):
937
+ """Get the items in a deployment stage
938
+ Args:
939
+ pipeline_id (str): The ID of the deployment pipeline
940
+ stage_id (str): The ID of the deployment stage
941
+ stage_name (str): The name of the deployment stage
942
+ Returns:
943
+ list: List of DeploymentStageItem objects
944
+ """
945
+ pipeline = DeploymentPipeline.get_pipeline(pipeline_id, auth=self.auth)
946
+ return pipeline.get_deployment_pipeline_stages_items(stage_id, stage_name)
947
+
948
+ def get_deployment_pipeline_stages(self, pipeline_id):
949
+ """Get the stages of a deployment pipeline
950
+ Args:
951
+ pipeline_id (str): The ID of the deployment pipeline
952
+ Returns:
953
+ list: List of DeploymentPipelineStage objects
954
+ """
955
+ pipeline = self.get_deployment_pipeline(pipeline_id)
956
+ return pipeline.get_stages()
957
+
958
+ def list_deployment_pipeline_stages(self, pipeline_id):
959
+ """Get the stages of a deployment pipeline
960
+ Args:
961
+ pipeline_id (str): The ID of the deployment pipeline
962
+ Returns:
963
+ list: List of DeploymentPipelineStage objects
964
+ """
965
+ return self.get_deployment_pipeline_stages(pipeline_id)
966
+
967
+
968
+ # Spark workspace settings
969
+
970
+ def get_spark_settings(self, workspace_id):
971
+ """Get spark settings for a workspace
972
+ Args:
973
+ workspace_id (str): The ID of the workspace
974
+ Returns:
975
+ dict: The spark settings"""
976
+ ws = self.get_workspace_by_id(workspace_id)
977
+ return ws.get_spark_settings()
978
+
979
+ def update_spark_settings(self, workspace_id, automatic_log = None,
980
+ environment = None, high_concurrency = None, pool = None):
981
+ """Update spark settings for a workspace
982
+ Args:
983
+ workspace_id (str): The ID of the workspace
984
+ automatic_log (bool): Whether to automatically log
985
+ environment (str): The environment
986
+ high_concurrency (bool): Whether to use high concurrency
987
+ pool (str): The pool
988
+ Returns:
989
+ dict: The updated spark settings"""
990
+ ws = self.get_workspace_by_id(workspace_id)
991
+ return ws.update_spark_settings(automatic_log=automatic_log,
992
+ environment=environment,
993
+ high_concurrency=high_concurrency,
994
+ pool=pool)
@@ -0,0 +1,240 @@
1
+ import json
2
+ from time import sleep
3
+
4
+ import requests
5
+ from msfabricpysdkcore.long_running_operation import check_long_running_operation
6
+
7
+
8
+ class DeploymentPipeline:
9
+ """Class to represent a deployment pipeline in Microsoft Fabric"""
10
+
11
+ def __init__(self, id, display_name, description, auth) -> None:
12
+ self.id = id
13
+ self.display_name = display_name
14
+ self.description = description
15
+ self.auth = auth
16
+
17
+
18
+ def from_dict(dict, auth):
19
+ """Create a Workspace object from a dictionary"""
20
+ if dict["displayName"] == None:
21
+ dict["displayName"] = dict["display_name"]
22
+
23
+
24
+ return DeploymentPipeline(
25
+ id=dict["id"],
26
+ display_name=dict["displayName"],
27
+ description=dict["description"],
28
+ auth=auth
29
+ )
30
+
31
+
32
+ def __str__(self) -> str:
33
+ """Return a string representation of the workspace object"""
34
+ dict_ = {"id": self.id, "display_name": self.display_name, "description": self.description}
35
+
36
+ return json.dumps(dict_, indent=2)
37
+
38
+ def __repr__(self) -> str:
39
+ return self.__str__()
40
+
41
+ def get_pipeline(deployment_pipeline_id, auth):
42
+ """Get a deployment pipeline"""
43
+ # GET https://api.fabric.microsoft.com/v1/deploymentPipelines/{deploymentPipelineId}
44
+
45
+ url = f"https://api.fabric.microsoft.com/v1/deploymentPipelines/{deployment_pipeline_id}"
46
+
47
+ for _ in range(10):
48
+ response = requests.get(url=url, headers=auth.get_headers())
49
+ if response.status_code == 429:
50
+ print("Too many requests, waiting 10 seconds")
51
+ sleep(10)
52
+ continue
53
+ if response.status_code not in (200, 429):
54
+ print(response.status_code)
55
+ print(response.text)
56
+ raise Exception(f"Error getting item: {response.text}")
57
+ break
58
+
59
+ item_dict = json.loads(response.text)
60
+ return DeploymentPipeline.from_dict(item_dict, auth)
61
+
62
+ def get_stages(self, continuationToken = None):
63
+ """Get stages in a deployment pipeline"""
64
+ # GET https://api.fabric.microsoft.com/v1/deploymentPipelines/{deploymentPipelineId}/stages
65
+
66
+ url = f"https://api.fabric.microsoft.com/v1/deploymentPipelines/{self.id}/stages"
67
+
68
+ if continuationToken:
69
+ url += f"?continuationToken={continuationToken}"
70
+
71
+ for _ in range(10):
72
+ response = requests.get(url=url, headers=self.auth.get_headers())
73
+ if response.status_code == 429:
74
+ print("Too many requests, waiting 10 seconds")
75
+ sleep(10)
76
+ continue
77
+ if response.status_code not in (200, 429):
78
+ print(response.status_code)
79
+ print(response.text)
80
+ raise Exception(f"Error getting stages: {response.text}")
81
+ break
82
+
83
+ resp_dict = json.loads(response.text)
84
+ items = resp_dict["value"]
85
+ for item in items:
86
+ item["deploymentPipelineId"] = self.id
87
+ stages = [Deployment_Pipeline_Stage.from_dict(item, self.auth) for item in items]
88
+
89
+ if "continuationToken" in resp_dict:
90
+ stages_next = self.get_stages(continuationToken=resp_dict["continuationToken"])
91
+ stages.extend(stages_next)
92
+
93
+ return stages
94
+
95
+
96
+ def deploy(self, source_stage_id, target_stage_id, created_workspace_details = None,
97
+ items = None, note = None, wait_for_completion = True):
98
+ # POST https://api.fabric.microsoft.com/v1/deploymentPipelines/{deploymentPipelineId}/deploy
99
+
100
+ url = f"https://api.fabric.microsoft.com/v1/deploymentPipelines/{self.id}/deploy"
101
+
102
+ body = {
103
+ "sourceStageId": source_stage_id,
104
+ "targetStageId": target_stage_id
105
+ }
106
+
107
+ if created_workspace_details:
108
+ body["createdWorkspaceDetails"] = created_workspace_details
109
+ if items:
110
+ body["items"] = items
111
+ if note:
112
+ body["note"] = note
113
+
114
+
115
+ for _ in range(10):
116
+ response = requests.post(url=url, headers=self.auth.get_headers(), json=body)
117
+ if response.status_code == 429:
118
+ print("Too many requests, waiting 10 seconds")
119
+ sleep(10)
120
+ continue
121
+ if response.status_code == 202 and wait_for_completion:
122
+ print("successfully started the operation")
123
+ try:
124
+ operation_result = check_long_running_operation( response.headers, self.auth)
125
+ return operation_result
126
+ except Exception as e:
127
+ print("Problem waiting for long running operation. Returning initial response.")
128
+ print(e)
129
+ return response
130
+ if response.status_code not in (200, 429):
131
+ print(response.status_code)
132
+ print(response.text)
133
+ raise Exception(f"Error deploying: {response.text}")
134
+ break
135
+
136
+ return response.json()
137
+
138
+
139
+
140
+ def get_deployment_pipeline_stages_items(self, stage_id = None, stage_name = None):
141
+ """Get items in a deployment pipeline stage"""
142
+
143
+ if stage_id == None and stage_name == None:
144
+ raise Exception("Please provide either stage_id or stage_name")
145
+ stages = self.get_stages()
146
+ if stage_id is None:
147
+ dep_pip_stages = [stage for stage in stages if stage.display_name == stage_name]
148
+ if len(dep_pip_stages) == 0:
149
+ raise Exception(f"Stage with name {stage_name} not found")
150
+ else:
151
+ dep_pip_stages = [stage for stage in stages if stage.id == stage_id]
152
+ if len(dep_pip_stages) == 0:
153
+ raise Exception(f"Stage with id {stage_id} not found")
154
+ dep_pip_stage = dep_pip_stages[0]
155
+ return dep_pip_stage.get_items()
156
+
157
+ class Deployment_Pipeline_Stage():
158
+
159
+ """Class to represent a deployment pipeline stage in Microsoft Fabric"""
160
+
161
+ def __init__(self, id, order, display_name, description, workspace_id, workspace_name, is_public, deployment_pipeline_id, auth) -> None:
162
+ self.id = id
163
+ self.order = order
164
+ self.display_name = display_name
165
+ self.description = description
166
+ self.workspace_id = workspace_id
167
+ self.workspace_name = workspace_name
168
+ self.is_public = is_public
169
+ self.deployment_pipeline_id = deployment_pipeline_id
170
+ self.auth = auth
171
+
172
+
173
+ def from_dict(dict, auth):
174
+ """Create a Workspace object from a dictionary"""
175
+ if dict["displayName"] is None:
176
+ dict["displayName"] = dict["display_name"]
177
+
178
+ if dict.get("workspaceId", None) is None:
179
+ dict["workspaceId"] = dict.get("workspace_id", None)
180
+
181
+ if dict.get("workspaceName", None) is None:
182
+ dict["workspaceName"] = dict.get("workspace_name", None)
183
+
184
+ if dict["deploymentPipelineId"] is None:
185
+ dict["deploymentPipelineId"] = dict["deployment_pipeline_id"]
186
+
187
+ if dict["isPublic"] is None:
188
+ dict["isPublic"] = dict["is_public"]
189
+
190
+
191
+ return Deployment_Pipeline_Stage(id=dict["id"],
192
+ order=dict["order"],
193
+ display_name=dict["displayName"],
194
+ description=dict["description"],
195
+ workspace_id=dict["workspaceId"],
196
+ workspace_name=dict["workspaceName"],
197
+ is_public=dict["isPublic"],
198
+ deployment_pipeline_id=dict["deploymentPipelineId"],
199
+ auth=auth
200
+ )
201
+
202
+ def __str__(self) -> str:
203
+ """Return a string representation of the workspace object"""
204
+ dict_ = {"id": self.id, "order": self.order, "display_name": self.display_name,
205
+ "description": self.description, "workspace_id": self.workspace_id,
206
+ "workspace_name": self.workspace_name,
207
+ "deployment_pipeline_id": self.deployment_pipeline_id,
208
+ "is_public": self.is_public}
209
+
210
+ return json.dumps(dict_, indent=2)
211
+
212
+ def __repr__(self) -> str:
213
+ return self.__str__()
214
+
215
+
216
+ def get_items(self, continuationToken = None):
217
+ """Get items in a deployment pipeline stage"""
218
+ # GET https://api.fabric.microsoft.com/v1/deploymentPipelines/{deploymentPipelineId}/stages/{stageId}/items
219
+
220
+ url = f"https://api.fabric.microsoft.com/v1/deploymentPipelines/{self.deployment_pipeline_id}/stages/{self.id}/items"
221
+ if continuationToken is not None:
222
+ url += f"?continuationToken={continuationToken}"
223
+
224
+ for _ in range(10):
225
+ response = requests.get(url=url, headers=self.auth.get_headers())
226
+ if response.status_code == 429:
227
+ print("Too many requests, waiting 10 seconds")
228
+ sleep(10)
229
+ continue
230
+ if response.status_code not in (200, 429):
231
+ print(response.status_code)
232
+ print(response.text)
233
+ raise Exception(f"Error getting items: {response.text}")
234
+ break
235
+
236
+ resp_dict = json.loads(response.text)
237
+ items = resp_dict["value"]
238
+ return items
239
+
240
+