msfabricpysdkcore 0.0.10__tar.gz → 0.0.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/PKG-INFO +22 -8
  2. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/README.md +21 -7
  3. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/auth.py +9 -6
  4. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/client.py +5 -4
  5. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/coreapi.py +76 -16
  6. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/environment.py +51 -64
  7. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/item.py +1 -4
  8. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/long_running_operation.py +9 -4
  9. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/otheritems.py +61 -0
  10. msfabricpysdkcore-0.0.12/msfabricpysdkcore/tests/test_datapipelines.py +48 -0
  11. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_deployment_pipeline.py +1 -2
  12. msfabricpysdkcore-0.0.12/msfabricpysdkcore/tests/test_environments.py +65 -0
  13. msfabricpysdkcore-0.0.12/msfabricpysdkcore/tests/test_evenstreams.py +44 -0
  14. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_git.py +2 -2
  15. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_items_incl_lakehouse.py +35 -161
  16. msfabricpysdkcore-0.0.12/msfabricpysdkcore/tests/test_kqldatabases.py +48 -0
  17. msfabricpysdkcore-0.0.12/msfabricpysdkcore/tests/test_sparkjobdefinition.py +55 -0
  18. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_workspaces_capacities.py +60 -50
  19. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/workspace.py +163 -39
  20. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore.egg-info/PKG-INFO +22 -8
  21. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore.egg-info/SOURCES.txt +4 -0
  22. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/pyproject.toml +1 -1
  23. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/setup.py +1 -1
  24. msfabricpysdkcore-0.0.10/msfabricpysdkcore/tests/test_environments.py +0 -48
  25. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/LICENSE +0 -0
  26. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/__init__.py +0 -0
  27. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/admin_item.py +0 -0
  28. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/admin_workspace.py +0 -0
  29. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/adminapi.py +0 -0
  30. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/capacity.py +0 -0
  31. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/deployment_pipeline.py +0 -0
  32. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/domain.py +0 -0
  33. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/job_instance.py +0 -0
  34. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/lakehouse.py +0 -0
  35. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/onelakeshortcut.py +0 -0
  36. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/spark_custom_pool.py +0 -0
  37. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/__init__.py +0 -0
  38. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_admin_apis.py +0 -0
  39. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_domains.py +0 -0
  40. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_jobs.py +0 -0
  41. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_shortcuts.py +0 -0
  42. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore/tests/test_spark.py +0 -0
  43. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore.egg-info/dependency_links.txt +0 -0
  44. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore.egg-info/requires.txt +0 -0
  45. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/msfabricpysdkcore.egg-info/top_level.txt +0 -0
  46. {msfabricpysdkcore-0.0.10 → msfabricpysdkcore-0.0.12}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: msfabricpysdkcore
3
- Version: 0.0.10
3
+ Version: 0.0.12
4
4
  Summary: A Python SDK for Microsoft Fabric
5
5
  Author: Andreas Rederer
6
6
  Project-URL: Homepage, https://github.com/DaSenf1860/ms-fabric-sdk-core
@@ -148,26 +148,40 @@ ws.add_role_assignment(principal = {"id" : "abadfbafb",
148
148
  role = 'Member')
149
149
 
150
150
 
151
- # Get workspace role assignments
152
- fc.get_workspace_role_assignments(workspace_id = ws.id)
151
+ # List workspace role assignments
152
+ fc.list_workspace_role_assignments(workspace_id = ws.id)
153
153
  # or
154
- ws.get_role_assignments()
154
+ ws.list_role_assignments()
155
155
 
156
156
 
157
+ # Get workspace role assignment
158
+ fc.get_workspace_role_assignment(workspace_id = ws.id,
159
+ workspace_role_assignment_id = "dagdasf")
160
+ # or
161
+ ws.get_role_assignment(workspace_role_assignment_id = "fsgdg")
162
+
157
163
  # Update workspace role assignment
158
164
  fc.update_workspace_role_assignment(workspace_id = ws.id,
159
165
  role = "Contributor",
160
- principal_id = "abadfbafb")
166
+ workspace_role_assignment_id = "abadfbafb")
161
167
  # or
162
168
  ws.update_role_assignment(role = "Contributor",
163
- principal_id = "abadfbafb")
169
+ workspace_role_assignment_id = "abadfbafb")
164
170
 
165
171
 
166
172
  # Delete workspace role assignment
167
173
  fc.delete_workspace_role_assignment(workspace_id = ws.id,
168
- principal_id = "abadfbafb")
174
+ workspace_role_assignment_id = "abadfbafb")
169
175
  # or
170
- ws.delete_role_assignment(principal_id = "abadfbafb")
176
+ ws.delete_role_assignment(workspace_role_assignment_id = "abadfbafb")
177
+
178
+
179
+ # Provision Identity
180
+ result = fc.provision_identity(workspace_id=ws.id)
181
+ print(result["applicationId"])
182
+
183
+ # Deprovision Identity
184
+ fc.deprovision_identity(workspace_id=ws.id)
171
185
 
172
186
  ```
173
187
 
@@ -133,26 +133,40 @@ ws.add_role_assignment(principal = {"id" : "abadfbafb",
133
133
  role = 'Member')
134
134
 
135
135
 
136
- # Get workspace role assignments
137
- fc.get_workspace_role_assignments(workspace_id = ws.id)
136
+ # List workspace role assignments
137
+ fc.list_workspace_role_assignments(workspace_id = ws.id)
138
138
  # or
139
- ws.get_role_assignments()
139
+ ws.list_role_assignments()
140
140
 
141
141
 
142
+ # Get workspace role assignment
143
+ fc.get_workspace_role_assignment(workspace_id = ws.id,
144
+ workspace_role_assignment_id = "dagdasf")
145
+ # or
146
+ ws.get_role_assignment(workspace_role_assignment_id = "fsgdg")
147
+
142
148
  # Update workspace role assignment
143
149
  fc.update_workspace_role_assignment(workspace_id = ws.id,
144
150
  role = "Contributor",
145
- principal_id = "abadfbafb")
151
+ workspace_role_assignment_id = "abadfbafb")
146
152
  # or
147
153
  ws.update_role_assignment(role = "Contributor",
148
- principal_id = "abadfbafb")
154
+ workspace_role_assignment_id = "abadfbafb")
149
155
 
150
156
 
151
157
  # Delete workspace role assignment
152
158
  fc.delete_workspace_role_assignment(workspace_id = ws.id,
153
- principal_id = "abadfbafb")
159
+ workspace_role_assignment_id = "abadfbafb")
154
160
  # or
155
- ws.delete_role_assignment(principal_id = "abadfbafb")
161
+ ws.delete_role_assignment(workspace_role_assignment_id = "abadfbafb")
162
+
163
+
164
+ # Provision Identity
165
+ result = fc.provision_identity(workspace_id=ws.id)
166
+ print(result["applicationId"])
167
+
168
+ # Deprovision Identity
169
+ fc.deprovision_identity(workspace_id=ws.id)
156
170
 
157
171
  ```
158
172
 
@@ -26,8 +26,9 @@ class FabricAuth():
26
26
  class FabricAuthClient(FabricAuth):
27
27
  """FabricAuthClient class to interact with Entra ID"""
28
28
 
29
- def __init__(self):
30
- print("Using Azure CLI for authentication")
29
+ def __init__(self, silent = False):
30
+ if not silent:
31
+ print("Using Azure CLI for authentication")
31
32
  self.auth = AzureCliCredential()
32
33
 
33
34
  def get_token(self):
@@ -38,8 +39,9 @@ class FabricAuthClient(FabricAuth):
38
39
  class FabricServicePrincipal(FabricAuth):
39
40
  """FabricServicePrincipal class to interact with Entra ID"""
40
41
 
41
- def __init__(self, tenant_id, client_id, client_secret):
42
- print("Using Service Principal for authentication")
42
+ def __init__(self, tenant_id, client_id, client_secret, silent = False):
43
+ if not silent:
44
+ print("Using Service Principal for authentication")
43
45
 
44
46
  self.tenant_id = tenant_id
45
47
  self.client_id = client_id
@@ -65,9 +67,10 @@ class FabricServicePrincipal(FabricAuth):
65
67
  class FabricSparkUtilsAuthentication(FabricAuth):
66
68
  """FabricSparkUtilsAuthentication class to interact with Entra ID"""
67
69
 
68
- def __init__(self):
70
+ def __init__(self, silent = False):
69
71
  mssparkutils.credentials.getToken("pbi")
70
- print("Using Synapse Spark Utils for authentication")
72
+ if not silent:
73
+ print("Using Synapse Spark Utils for authentication")
71
74
 
72
75
  def get_token(self):
73
76
  """Get token from Azure AD"""
@@ -6,7 +6,7 @@ from msfabricpysdkcore.auth import FabricAuthClient, FabricServicePrincipal, Fab
6
6
  class FabricClient():
7
7
  """FabricClient class to interact with Fabric API"""
8
8
 
9
- def __init__(self, tenant_id = None, client_id = None, client_secret = None) -> None:
9
+ def __init__(self, tenant_id = None, client_id = None, client_secret = None, silent=False) -> None:
10
10
  """Initialize FabricClient object"""
11
11
  self.tenant_id = tenant_id if tenant_id else os.getenv("FABRIC_TENANT_ID")
12
12
  self.client_id = client_id if client_id else os.getenv("FABRIC_CLIENT_ID")
@@ -16,10 +16,11 @@ class FabricClient():
16
16
 
17
17
  if self.client_id is None or self.client_secret is None or self.tenant_id is None:
18
18
  try:
19
- self.auth = FabricSparkUtilsAuthentication()
19
+ self.auth = FabricSparkUtilsAuthentication(silent=silent)
20
20
  except:
21
- self.auth = FabricAuthClient()
21
+ self.auth = FabricAuthClient(silent=silent)
22
22
  else:
23
23
  self.auth = FabricServicePrincipal(tenant_id = self.tenant_id,
24
24
  client_id = self.client_id,
25
- client_secret = self.client_secret)
25
+ client_secret = self.client_secret,
26
+ silent=silent)
@@ -11,9 +11,9 @@ from msfabricpysdkcore.workspace import Workspace
11
11
  class FabricClientCore(FabricClient):
12
12
  """FabricClientCore class to interact with Fabric Core APIs"""
13
13
 
14
- def __init__(self, tenant_id = None, client_id = None, client_secret = None) -> None:
14
+ def __init__(self, tenant_id = None, client_id = None, client_secret = None, silent=False) -> None:
15
15
  """Initialize FabricClientCore object"""
16
- super().__init__(tenant_id, client_id, client_secret)
16
+ super().__init__(tenant_id, client_id, client_secret, silent=silent)
17
17
 
18
18
 
19
19
  def list_workspaces(self, continuationToken = None):
@@ -81,10 +81,10 @@ class FabricClientCore(FabricClient):
81
81
  return self.get_workspace_by_name(name)
82
82
  raise ValueError("Either id or name must be provided")
83
83
 
84
- def get_workspace_role_assignments(self, workspace_id):
84
+ def list_workspace_role_assignments(self, workspace_id):
85
85
  """Get role assignments for a workspace"""
86
86
  ws = self.get_workspace_by_id(workspace_id)
87
- return ws.get_role_assignments()
87
+ return ws.list_role_assignments()
88
88
 
89
89
  def create_workspace(self, display_name, capacity_id = None, description = None, exists_ok = True):
90
90
  """Create a workspace"""
@@ -129,20 +129,25 @@ class FabricClientCore(FabricClient):
129
129
  ws = self.get_workspace_by_id(workspace_id)
130
130
  return ws.add_role_assignment(role, principal)
131
131
 
132
- def delete_workspace_role_assignment(self, workspace_id, principal_id):
132
+ def delete_workspace_role_assignment(self, workspace_id, workspace_role_assignment_id):
133
133
  """Delete a role assignment from a workspace"""
134
134
  ws = self.get_workspace_by_id(workspace_id)
135
- return ws.delete_role_assignment(principal_id)
135
+ return ws.delete_role_assignment(workspace_role_assignment_id)
136
136
 
137
137
  def update_workspace(self, workspace_id, display_name = None, description = None):
138
138
  """Update a workspace"""
139
139
  ws = self.get_workspace_by_id(workspace_id)
140
140
  return ws.update(display_name, description)
141
+
142
+ def get_workspace_role_assignment(self, workspace_id, workspace_role_assignment_id):
143
+ """Get a role assignment for a workspace"""
144
+ ws = self.get_workspace_by_id(workspace_id)
145
+ return ws.get_role_assignment(workspace_role_assignment_id)
141
146
 
142
- def update_workspace_role_assignment(self, workspace_id, role, principal_id):
147
+ def update_workspace_role_assignment(self, workspace_id, role, workspace_role_assignment_id):
143
148
  """Update a role assignment for a workspace"""
144
149
  ws = self.get_workspace_by_id(workspace_id)
145
- return ws.update_role_assignment(role, principal_id)
150
+ return ws.update_role_assignment(role, workspace_role_assignment_id)
146
151
 
147
152
  def assign_to_capacity(self, workspace_id, capacity_id):
148
153
  """Assign a workspace to a capacity"""
@@ -153,6 +158,16 @@ class FabricClientCore(FabricClient):
153
158
  """Unassign a workspace from a capacity"""
154
159
  ws = self.get_workspace_by_id(workspace_id)
155
160
  return ws.unassign_from_capacity()
161
+
162
+ def provision_identity(self, workspace_id):
163
+ """Provision an identity for a workspace"""
164
+ ws = self.get_workspace_by_id(workspace_id)
165
+ return ws.provision_identity()
166
+
167
+ def deprovision_identity(self, workspace_id):
168
+ """Deprovision an identity for a workspace"""
169
+ ws = self.get_workspace_by_id(workspace_id)
170
+ return ws.deprovision_identity()
156
171
 
157
172
  def list_capacities(self, continuationToken = None):
158
173
  """List all capacities in the tenant"""
@@ -414,12 +429,20 @@ class FabricClientCore(FabricClient):
414
429
  ws = self.get_workspace_by_id(workspace_id)
415
430
  return ws.get_environment(environment_id).get_staging_settings()
416
431
 
417
- def update_staging_settings(self, workspace_id, environment_id, instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
418
- dynamic_executor_allocation, spark_properties, runtime_version):
419
- """Update staging settings for an environment"""
420
- ws = self.get_workspace_by_id(workspace_id)
421
- return ws.get_environment(environment_id).update_staging_settings(instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
422
- dynamic_executor_allocation, spark_properties, runtime_version)
432
+ def update_staging_settings(self, workspace_id, environment_id,
433
+ driver_cores = None, driver_memory = None, dynamic_executor_allocation = None,
434
+ executor_cores = None, executor_memory = None, instance_pool = None,
435
+ runtime_version = None, spark_properties = None):
436
+
437
+ return self.get_environment(workspace_id, environment_id).update_staging_settings(driver_cores=driver_cores,
438
+ driver_memory=driver_memory,
439
+ dynamic_executor_allocation=dynamic_executor_allocation,
440
+ executor_cores=executor_cores,
441
+ executor_memory=executor_memory,
442
+ instance_pool=instance_pool,
443
+ runtime_version=runtime_version,
444
+ spark_properties=spark_properties)
445
+
423
446
 
424
447
  # environmentSparkLibraries
425
448
 
@@ -433,10 +456,10 @@ class FabricClientCore(FabricClient):
433
456
  ws = self.get_workspace_by_id(workspace_id)
434
457
  return ws.get_environment(environment_id).get_staging_libraries()
435
458
 
436
- def update_staging_library(self, workspace_id, environment_id):
459
+ def upload_staging_library(self, workspace_id, environment_id, file_path):
437
460
  """Update staging libraries for an environment"""
438
461
  ws = self.get_workspace_by_id(workspace_id)
439
- return ws.get_environment(environment_id).update_staging_libraries()
462
+ return ws.get_environment(environment_id).upload_staging_library(file_path=file_path)
440
463
 
441
464
  def publish_environment(self, workspace_id, environment_id):
442
465
  """Publish an environment"""
@@ -453,6 +476,33 @@ class FabricClientCore(FabricClient):
453
476
  ws = self.get_workspace_by_id(workspace_id)
454
477
  return ws.get_environment(environment_id).cancel_publish()
455
478
 
479
+ # eventhouses
480
+
481
+ def list_eventhouses(self, workspace_id):
482
+ """List eventhouses in a workspace"""
483
+ ws = self.get_workspace_by_id(workspace_id)
484
+ return ws.list_eventhouses()
485
+
486
+ def create_eventhouse(self, workspace_id, display_name, description = None):
487
+ """Create an eventhouse in a workspace"""
488
+ ws = self.get_workspace_by_id(workspace_id)
489
+ return ws.create_eventhouse(display_name = display_name, description = description)
490
+
491
+ def get_eventhouse(self, workspace_id, eventhouse_id = None, eventhouse_name = None):
492
+ """Get an eventhouse from a workspace"""
493
+ ws = self.get_workspace_by_id(workspace_id)
494
+ return ws.get_eventhouse(eventhouse_id = eventhouse_id, eventhouse_name = eventhouse_name)
495
+
496
+ def delete_eventhouse(self, workspace_id, eventhouse_id):
497
+ """Delete an eventhouse from a workspace"""
498
+ ws = self.get_workspace_by_id(workspace_id)
499
+ return ws.delete_eventhouse(eventhouse_id)
500
+
501
+ def update_eventhouse(self, workspace_id, eventhouse_id, display_name = None, description = None):
502
+ """Update an eventhouse in a workspace"""
503
+ ws = self.get_workspace_by_id(workspace_id)
504
+ return ws.update_eventhouse(eventhouse_id, display_name = display_name, description = description)
505
+
456
506
  # eventstreams
457
507
 
458
508
  def list_eventstreams(self, workspace_id):
@@ -486,6 +536,11 @@ class FabricClientCore(FabricClient):
486
536
  """List kql databases in a workspace"""
487
537
  ws = self.get_workspace_by_id(workspace_id)
488
538
  return ws.list_kql_databases()
539
+
540
+ def create_kql_database(self, workspace_id, creation_payload, display_name, description = None):
541
+ """Create a kql database in a workspace"""
542
+ ws = self.get_workspace_by_id(workspace_id)
543
+ return ws.create_kql_database(creation_payload = creation_payload, display_name = display_name, description = description)
489
544
 
490
545
  def get_kql_database(self, workspace_id, kql_database_id = None, kql_database_name = None):
491
546
  """Get a kql database from a workspace"""
@@ -769,6 +824,11 @@ class FabricClientCore(FabricClient):
769
824
  ws = self.get_workspace_by_id(workspace_id)
770
825
  return ws.update_spark_job_definition_definition(spark_job_definition_id, definition)
771
826
 
827
+ def run_on_demand_spark_job_definition(self, workspace_id, spark_job_definition_id, job_type = "sparkjob"):
828
+ """Run an on demand spark job definition"""
829
+ ws = self.get_workspace_by_id(workspace_id)
830
+ return ws.run_on_demand_spark_job_definition(spark_job_definition_id, job_type)
831
+
772
832
  # warehouses
773
833
 
774
834
  def list_warehouses(self, workspace_id, with_properties = False):
@@ -8,23 +8,13 @@ from msfabricpysdkcore.long_running_operation import check_long_running_operatio
8
8
  class Environment(Item):
9
9
  """Class to represent a item in Microsoft Fabric"""
10
10
 
11
- def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description="",
12
- sparkcompute = None, staging_sparkcompute = None, libraries = None, staging_libraries = None):
11
+ def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
13
12
  super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
14
13
 
15
- self.sparkcompute = sparkcompute
16
- self.staging_sparkcompute = staging_sparkcompute
17
- self.libraries = libraries
18
- self.staging_libraries = staging_libraries
19
-
20
14
  def from_dict(item_dict, auth):
21
15
  return Environment(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
22
16
  properties=item_dict.get('properties', None),
23
- definition=item_dict.get('definition', None), description=item_dict.get('description', ""),
24
- sparkcompute=item_dict.get('sparkcompute', None),
25
- staging_sparkcompute=item_dict.get('staging_sparkcompute', None),
26
- libraries=item_dict.get('libraries', None),
27
- staging_libraries=item_dict.get('staging_libraries', None),
17
+ definition=item_dict.get('definition', None), description=item_dict.get('description', ""),
28
18
  auth=auth)
29
19
 
30
20
  # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/sparkcompute
@@ -39,14 +29,10 @@ class Environment(Item):
39
29
  sleep(10)
40
30
  continue
41
31
  if response.status_code not in (200, 429):
42
- print(response.status_code)
43
- print(response.text)
44
- print(self)
45
- raise Exception(f"Error getting published settings: {response.text}")
32
+ raise Exception(f"Error getting published settings: {response.status_code}, {response.text}")
46
33
  break
47
34
 
48
35
  resp_json = json.loads(response.text)
49
- self.sparkcompute = resp_json
50
36
  return resp_json
51
37
 
52
38
  # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/sparkcompute
@@ -62,30 +48,38 @@ class Environment(Item):
62
48
  sleep(10)
63
49
  continue
64
50
  if response.status_code not in (200, 429):
65
- print(response.status_code)
66
- print(response.text)
67
- print(self)
68
- raise Exception(f"Error getting staging settings: {response.text}")
51
+ raise Exception(f"Error getting staging settings: {response.status_code}, {response.text}")
69
52
  break
70
53
 
71
54
  resp_json = json.loads(response.text)
72
- self.staging_sparkcompute = resp_json
73
55
  return resp_json
74
56
 
75
- def update_staging_settings(self, instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
76
- dynamic_executor_allocation, spark_properties, runtime_version):
57
+
58
+ def update_staging_settings(self,
59
+ driver_cores = None, driver_memory = None, dynamic_executor_allocation = None,
60
+ executor_cores = None, executor_memory = None, instance_pool = None,
61
+ runtime_version = None, spark_properties = None):
77
62
  """Update the staging settings of the environment"""
78
63
  url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/sparkcompute"
79
- body = {
80
- "instancePool": instance_pool,
81
- "driverCores": driver_cores,
82
- "driverMemory": driver_memory,
83
- "executorCores": executor_cores,
84
- "executorMemory": executor_memory,
85
- "dynamicExecutorAllocation": dynamic_executor_allocation,
86
- "sparkProperties": spark_properties,
87
- "runtimeVersion": runtime_version
88
- }
64
+ body = {}
65
+ if driver_cores is not None:
66
+ body['driverCores'] = driver_cores
67
+ if driver_memory is not None:
68
+ body['driverMemory'] = driver_memory
69
+ if dynamic_executor_allocation is not None:
70
+ body['dynamicExecutorAllocation'] = dynamic_executor_allocation
71
+ if executor_cores is not None:
72
+ body['executorCores'] = executor_cores
73
+ if executor_memory is not None:
74
+ body['executorMemory'] = executor_memory
75
+ if instance_pool is not None:
76
+ body['instancePool'] = instance_pool
77
+ if runtime_version is not None:
78
+ body['runtimeVersion'] = runtime_version
79
+ if spark_properties is not None:
80
+ body['sparkProperties'] = spark_properties
81
+
82
+
89
83
  for _ in range(10):
90
84
  response = requests.patch(url=url, headers=self.auth.get_headers(), json=body)
91
85
  if response.status_code == 429:
@@ -93,14 +87,10 @@ class Environment(Item):
93
87
  sleep(10)
94
88
  continue
95
89
  if response.status_code not in (200, 429):
96
- print(response.status_code)
97
- print(response.text)
98
- print(self)
99
- raise Exception(f"Error updating staging settings: {response.text}")
90
+ raise Exception(f"Error updating staging settings: {response.status_code}, {response.text}")
100
91
  break
101
92
 
102
- self.staging_sparkcompute = body
103
- return body
93
+ return json.loads(response.text)
104
94
 
105
95
  # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/libraries
106
96
 
@@ -115,14 +105,10 @@ class Environment(Item):
115
105
  sleep(10)
116
106
  continue
117
107
  if response.status_code not in (200, 429):
118
- print(response.status_code)
119
- print(response.text)
120
- print(self)
121
- raise Exception(f"Error getting published libraries: {response.text}")
108
+ raise Exception(f"Error getting published libraries: {response.status_code}, {response.text}")
122
109
  break
123
110
 
124
111
  resp_json = json.loads(response.text)
125
- self.libraries = resp_json
126
112
  return resp_json
127
113
 
128
114
  # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries
@@ -138,19 +124,29 @@ class Environment(Item):
138
124
  sleep(10)
139
125
  continue
140
126
  if response.status_code not in (200, 429):
141
- print(response.status_code)
142
- print(response.text)
143
- print(self)
144
- raise Exception(f"Error getting staging libraries: {response.text}")
127
+ raise Exception(f"Error getting staging libraries: {response.status_code}, {response.text}")
145
128
  break
146
129
 
147
130
  resp_json = json.loads(response.text)
148
- self.staging_libraries = resp_json
149
131
  return resp_json
150
132
 
151
133
 
152
- def update_staging_libraries(self):
153
- raise NotImplementedError("This method is not implemented yet because the REST API is not complete")
134
+ def upload_staging_library(self, file_path):
135
+ # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries
136
+ raise NotImplementedError("Not implemented yet")
137
+ # url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/libraries"
138
+
139
+ # for _ in range(10):
140
+ # response = requests.post(url=url, files={'file': file_path}, headers=self.auth.get_headers())
141
+ # if response.status_code == 429:
142
+ # print("Too many requests, waiting 10 seconds")
143
+ # sleep(10)
144
+ # continue
145
+ # if response.status_code not in (200, 429):
146
+ # raise Exception(f"Error uploading staging libraries: {response.status_code}, {response.text}")
147
+ # break
148
+
149
+ # return json.loads(response.text)
154
150
 
155
151
  # DELETE https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries?libraryToDelete={libraryToDelete}
156
152
 
@@ -165,10 +161,7 @@ class Environment(Item):
165
161
  sleep(10)
166
162
  continue
167
163
  if response.status_code not in (200, 429):
168
- print(response.status_code)
169
- print(response.text)
170
- print(self)
171
- raise Exception(f"Error deleting staging libraries: {response.text}")
164
+ raise Exception(f"Error deleting staging libraries: {response.status_code}, {response.text}")
172
165
  break
173
166
 
174
167
  return response.text
@@ -189,10 +182,7 @@ class Environment(Item):
189
182
  publish_info = check_long_running_operation(response.headers, self.auth)
190
183
  return publish_info
191
184
  if response.status_code not in (200, 429):
192
- print(response.status_code)
193
- print(response.text)
194
- print(self)
195
- raise Exception(f"Error publishing staging: {response.text}")
185
+ raise Exception(f"Error publishing staging: {response.status_code}, {response.text}")
196
186
  break
197
187
 
198
188
  resp_dict = json.loads(response.text)
@@ -212,10 +202,7 @@ class Environment(Item):
212
202
  sleep(10)
213
203
  continue
214
204
  if response.status_code not in (200, 429):
215
- print(response.status_code)
216
- print(response.text)
217
- print(self)
218
- raise Exception(f"Error canceling publishing: {response.text}")
205
+ raise Exception(f"Error canceling publishing: {response.status_code}, {response.text}")
219
206
  break
220
207
 
221
208
  resp_dict = json.loads(response.text)
@@ -58,10 +58,7 @@ class Item:
58
58
  sleep(10)
59
59
  continue
60
60
  if response.status_code not in (200, 429):
61
- print(response.status_code)
62
- print(response.text)
63
- print(self)
64
- raise Exception(f"Error deleting item: {response.text}")
61
+ raise Exception(f"Error deleting item: {response.status_code}, {response.text}")
65
62
  break
66
63
 
67
64
  return response.status_code
@@ -1,6 +1,6 @@
1
1
  import json
2
2
  import requests
3
- from time import sleep
3
+ from time import sleep, time
4
4
 
5
5
  class LongRunningOperation:
6
6
  """Class to represent a workspace in Microsoft Fabric"""
@@ -49,12 +49,17 @@ class LongRunningOperation:
49
49
  def wait_for_completion(self):
50
50
  """Wait for the operation to complete"""
51
51
  max_iter = 20
52
+ start_time = time()
52
53
  while self.state not in ('Succeeded', 'Failed'):
53
54
  self.state = self.get_operation_state()["status"]
55
+ duration = int(time() - start_time)
56
+ if duration > 60:
57
+
58
+ if self.state == "Running":
59
+ print(f"Operation did not complete after {duration} seconds")
60
+ return "Running"
61
+ raise Exception(f"Operation did not complete after {duration} seconds")
54
62
  sleep(3)
55
- if max_iter == 0:
56
- raise Exception("Operation did not complete after 60 seconds")
57
- max_iter -= 1
58
63
  return self.state
59
64
 
60
65
 
@@ -2,6 +2,39 @@ import json
2
2
  from time import sleep
3
3
  import requests
4
4
  from msfabricpysdkcore.item import Item
5
+ from msfabricpysdkcore.long_running_operation import check_long_running_operation
6
+
7
+
8
+ class Eventhouse(Item):
9
+ """Class to represent a eventhouse in Microsoft Fabric"""
10
+
11
+ def __init__(self, id, display_name, type, workspace_id, auth, properties = None, description=""):
12
+ super().__init__(id = id, display_name=display_name, type=type,
13
+ workspace_id=workspace_id, auth=auth, properties=properties,
14
+ description=description)
15
+
16
+ def from_dict(item_dict, auth):
17
+ if "displayName" not in item_dict:
18
+ item_dict["displayName"] = item_dict["display_name"]
19
+ if "workspaceId" not in item_dict:
20
+ item_dict["workspaceId"] = item_dict["workspace_id"]
21
+
22
+ return Eventhouse(id=item_dict['id'], display_name=item_dict['displayName'],
23
+ type=item_dict['type'], workspace_id=item_dict['workspaceId'],
24
+ properties=item_dict.get('properties', None),
25
+ description=item_dict.get('description', ""), auth=auth)
26
+
27
+ def create_kql_database(self, display_name = None, description= None):
28
+ from msfabricpysdkcore.coreapi import FabricClientCore
29
+ """Method to create a kql database in the eventhouse"""
30
+ creation_payload = {"databaseType" : "ReadWrite",
31
+ "parentEventhouseItemId" : self.id}
32
+
33
+ fcc = FabricClientCore(silent=True)
34
+
35
+ return fcc.create_kql_database(workspace_id = self.workspace_id,
36
+ display_name = display_name, description = description,
37
+ creation_payload= creation_payload)
5
38
 
6
39
  class SparkJobDefinition(Item):
7
40
  """Class to represent a spark job definition in Microsoft Fabric"""
@@ -20,6 +53,34 @@ class SparkJobDefinition(Item):
20
53
  def update_definition(self, definition):
21
54
  return super().update_definition(definition=definition, type="sparkJobDefinitions")
22
55
 
56
+ def run_on_demand_spark_job_definition(self, job_type = "sparkjob"):
57
+ # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/sparkJobDefinitions/{sparkJobDefinitionId}/jobs/instances?jobType={jobType}
58
+ """Method to run a spark job definition on demand"""
59
+ url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/sparkJobDefinitions/{self.id}/jobs/instances?jobType={job_type}"
60
+
61
+ for _ in range(10):
62
+ response = requests.post(url=url, headers=self.auth.get_headers())
63
+ if response.status_code == 429:
64
+ print("Too many requests, waiting 10 seconds")
65
+ sleep(10)
66
+ continue
67
+ if response.status_code == 202:
68
+ location = response.headers['Location']
69
+ job_instance_id = location.split('/')[-1]
70
+
71
+ from msfabricpysdkcore import FabricClientCore
72
+ fc = FabricClientCore(silent=True)
73
+ fc.auth = self.auth
74
+ return fc.get_item_job_instance(workspace_id = self.workspace_id,
75
+ item_id = self.id,
76
+ job_instance_id = job_instance_id)
77
+ if response.status_code not in (200, 201, 202, 429):
78
+ raise Exception(f"Error running on demand spark job definition: {response.status_code}, {response.text}")
79
+ break
80
+
81
+ return response
82
+
83
+
23
84
  class Warehouse(Item):
24
85
  """Class to represent a warehouse in Microsoft Fabric"""
25
86