msfabricpysdkcore 0.0.10__py3-none-any.whl → 0.0.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
msfabricpysdkcore/auth.py CHANGED
@@ -26,8 +26,9 @@ class FabricAuth():
 class FabricAuthClient(FabricAuth):
     """FabricAuthClient class to interact with Entra ID"""
 
-    def __init__(self):
-        print("Using Azure CLI for authentication")
+    def __init__(self, silent = False):
+        if not silent:
+            print("Using Azure CLI for authentication")
         self.auth = AzureCliCredential()
 
     def get_token(self):
@@ -38,8 +39,9 @@ class FabricAuthClient(FabricAuth):
 class FabricServicePrincipal(FabricAuth):
     """FabricServicePrincipal class to interact with Entra ID"""
 
-    def __init__(self, tenant_id, client_id, client_secret):
-        print("Using Service Principal for authentication")
+    def __init__(self, tenant_id, client_id, client_secret, silent = False):
+        if not silent:
+            print("Using Service Principal for authentication")
 
         self.tenant_id = tenant_id
         self.client_id = client_id
@@ -65,9 +67,10 @@ class FabricServicePrincipal(FabricAuth):
 class FabricSparkUtilsAuthentication(FabricAuth):
     """FabricSparkUtilsAuthentication class to interact with Entra ID"""
 
-    def __init__(self):
+    def __init__(self, silent = False):
         mssparkutils.credentials.getToken("pbi")
-        print("Using Synapse Spark Utils for authentication")
+        if not silent:
+            print("Using Synapse Spark Utils for authentication")
 
     def get_token(self):
         """Get token from Azure AD"""
msfabricpysdkcore/client.py CHANGED
@@ -6,7 +6,7 @@ from msfabricpysdkcore.auth import FabricAuthClient, FabricServicePrincipal, Fab
 class FabricClient():
     """FabricClient class to interact with Fabric API"""
 
-    def __init__(self, tenant_id = None, client_id = None, client_secret = None) -> None:
+    def __init__(self, tenant_id = None, client_id = None, client_secret = None, silent=False) -> None:
         """Initialize FabricClient object"""
         self.tenant_id = tenant_id if tenant_id else os.getenv("FABRIC_TENANT_ID")
         self.client_id = client_id if client_id else os.getenv("FABRIC_CLIENT_ID")
@@ -16,10 +16,11 @@ class FabricClient():
 
         if self.client_id is None or self.client_secret is None or self.tenant_id is None:
             try:
-                self.auth = FabricSparkUtilsAuthentication()
+                self.auth = FabricSparkUtilsAuthentication(silent=silent)
             except:
-                self.auth = FabricAuthClient()
+                self.auth = FabricAuthClient(silent=silent)
         else:
             self.auth = FabricServicePrincipal(tenant_id = self.tenant_id,
                                                client_id = self.client_id,
-                                               client_secret = self.client_secret)
+                                               client_secret = self.client_secret,
+                                               silent=silent)
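
The new `silent` flag threads through every client constructor and suppresses the authentication banner that was previously always printed; it defaults to False, so existing callers are unaffected. A minimal usage sketch (credentials resolved as usual from arguments or environment variables):

    from msfabricpysdkcore.coreapi import FabricClientCore

    # Default behaviour still prints a banner such as
    # "Using Azure CLI for authentication".
    fc = FabricClientCore()

    # New in 0.0.11: construct the client without console output,
    # e.g. when the SDK is embedded in another library.
    fc_quiet = FabricClientCore(silent=True)
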
msfabricpysdkcore/coreapi.py CHANGED
@@ -11,9 +11,9 @@ from msfabricpysdkcore.workspace import Workspace
 class FabricClientCore(FabricClient):
     """FabricClientCore class to interact with Fabric Core APIs"""
 
-    def __init__(self, tenant_id = None, client_id = None, client_secret = None) -> None:
+    def __init__(self, tenant_id = None, client_id = None, client_secret = None, silent=False) -> None:
         """Initialize FabricClientCore object"""
-        super().__init__(tenant_id, client_id, client_secret)
+        super().__init__(tenant_id, client_id, client_secret, silent=silent)
 
 
     def list_workspaces(self, continuationToken = None):
@@ -414,12 +414,20 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.get_environment(environment_id).get_staging_settings()
 
-    def update_staging_settings(self, workspace_id, environment_id, instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
-                                dynamic_executor_allocation, spark_properties, runtime_version):
-        """Update staging settings for an environment"""
-        ws = self.get_workspace_by_id(workspace_id)
-        return ws.get_environment(environment_id).update_staging_settings(instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
-                                                                          dynamic_executor_allocation, spark_properties, runtime_version)
+    def update_staging_settings(self, workspace_id, environment_id,
+                                driver_cores = None, driver_memory = None, dynamic_executor_allocation = None,
+                                executor_cores = None, executor_memory = None, instance_pool = None,
+                                runtime_version = None, spark_properties = None):
+
+        return self.get_environment(workspace_id, environment_id).update_staging_settings(driver_cores=driver_cores,
+                                                                                          driver_memory=driver_memory,
+                                                                                          dynamic_executor_allocation=dynamic_executor_allocation,
+                                                                                          executor_cores=executor_cores,
+                                                                                          executor_memory=executor_memory,
+                                                                                          instance_pool=instance_pool,
+                                                                                          runtime_version=runtime_version,
+                                                                                          spark_properties=spark_properties)
+
 
     # environmentSparkLibraries
 
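update_staging_settings now treats every Spark setting as optional: only the arguments you actually pass end up in the PATCH body (see the body-building logic in environment.py below), so a partial update no longer requires restating the full configuration. A sketch of the new call style, with placeholder IDs and values:

    fc = FabricClientCore()

    # Only driverCores and driverMemory are sent; all other staging
    # settings of the environment are left untouched.
    updated = fc.update_staging_settings(workspace_id="<workspace-id>",
                                         environment_id="<environment-id>",
                                         driver_cores=8,
                                         driver_memory="56g")

Note that the method now returns the parsed JSON response from the service rather than echoing the request body.
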
@@ -433,10 +441,10 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.get_environment(environment_id).get_staging_libraries()
 
-    def update_staging_library(self, workspace_id, environment_id):
+    def upload_staging_library(self, workspace_id, environment_id, file_path):
         """Update staging libraries for an environment"""
         ws = self.get_workspace_by_id(workspace_id)
-        return ws.get_environment(environment_id).update_staging_libraries()
+        return ws.get_environment(environment_id).upload_staging_library(file_path=file_path)
 
     def publish_environment(self, workspace_id, environment_id):
         """Publish an environment"""
@@ -453,6 +461,33 @@ class FabricClientCore(FabricClient):
         ws = self.get_workspace_by_id(workspace_id)
         return ws.get_environment(environment_id).cancel_publish()
 
+    # eventhouses
+
+    def list_eventhouses(self, workspace_id):
+        """List eventhouses in a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.list_eventhouses()
+
+    def create_eventhouse(self, workspace_id, display_name, description = None):
+        """Create an eventhouse in a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.create_eventhouse(display_name = display_name, description = description)
+
+    def get_eventhouse(self, workspace_id, eventhouse_id = None, eventhouse_name = None):
+        """Get an eventhouse from a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.get_eventhouse(eventhouse_id = eventhouse_id, eventhouse_name = eventhouse_name)
+
+    def delete_eventhouse(self, workspace_id, eventhouse_id):
+        """Delete an eventhouse from a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.delete_eventhouse(eventhouse_id)
+
+    def update_eventhouse(self, workspace_id, eventhouse_id, display_name = None, description = None):
+        """Update an eventhouse in a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.update_eventhouse(eventhouse_id, display_name = display_name, description = description)
+
     # eventstreams
 
     def list_eventstreams(self, workspace_id):
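
The new eventhouse methods follow the same CRUD pattern as the other item types: each call resolves the workspace first and then delegates to the Workspace object. A life-cycle sketch with a placeholder workspace ID, mirroring the reworked test_eventhouses test further down:

    fc = FabricClientCore()
    workspace_id = "<workspace-id>"

    eh = fc.create_eventhouse(workspace_id, display_name="eventhouse1")
    eh = fc.get_eventhouse(workspace_id, eventhouse_name="eventhouse1")
    eh2 = fc.update_eventhouse(workspace_id, eh.id, display_name="eventhouse2")
    eventhouses = fc.list_eventhouses(workspace_id)
    status_code = fc.delete_eventhouse(workspace_id, eh.id)  # 200 on success
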
@@ -486,6 +521,11 @@ class FabricClientCore(FabricClient):
         """List kql databases in a workspace"""
         ws = self.get_workspace_by_id(workspace_id)
         return ws.list_kql_databases()
+
+    def create_kql_database(self, workspace_id, creation_payload, display_name, description = None):
+        """Create a kql database in a workspace"""
+        ws = self.get_workspace_by_id(workspace_id)
+        return ws.create_kql_database(creation_payload = creation_payload, display_name = display_name, description = description)
 
     def get_kql_database(self, workspace_id, kql_database_id = None, kql_database_name = None):
         """Get a kql database from a workspace"""
msfabricpysdkcore/environment.py CHANGED
@@ -8,23 +8,13 @@ from msfabricpysdkcore.long_running_operation import check_long_running_operatio
 class Environment(Item):
     """Class to represent a item in Microsoft Fabric"""
 
-    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description="",
-                 sparkcompute = None, staging_sparkcompute = None, libraries = None, staging_libraries = None):
+    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, definition=None, description=""):
         super().__init__(id, display_name, type, workspace_id, auth, properties, definition, description)
 
-        self.sparkcompute = sparkcompute
-        self.staging_sparkcompute = staging_sparkcompute
-        self.libraries = libraries
-        self.staging_libraries = staging_libraries
-
     def from_dict(item_dict, auth):
         return Environment(id=item_dict['id'], display_name=item_dict['displayName'], type=item_dict['type'], workspace_id=item_dict['workspaceId'],
                            properties=item_dict.get('properties', None),
-                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""),
-                           sparkcompute=item_dict.get('sparkcompute', None),
-                           staging_sparkcompute=item_dict.get('staging_sparkcompute', None),
-                           libraries=item_dict.get('libraries', None),
-                           staging_libraries=item_dict.get('staging_libraries', None),
+                           definition=item_dict.get('definition', None), description=item_dict.get('description', ""),
                            auth=auth)
 
     # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/sparkcompute
@@ -39,14 +29,10 @@ class Environment(Item):
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error getting published settings: {response.text}")
+                raise Exception(f"Error getting published settings: {response.status_code}, {response.text}")
             break
 
         resp_json = json.loads(response.text)
-        self.sparkcompute = resp_json
         return resp_json
 
     # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/sparkcompute
@@ -62,30 +48,38 @@ class Environment(Item):
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error getting staging settings: {response.text}")
+                raise Exception(f"Error getting staging settings: {response.status_code}, {response.text}")
             break
 
         resp_json = json.loads(response.text)
-        self.staging_sparkcompute = resp_json
         return resp_json
 
-    def update_staging_settings(self, instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
-                                dynamic_executor_allocation, spark_properties, runtime_version):
+
+    def update_staging_settings(self,
+                                driver_cores = None, driver_memory = None, dynamic_executor_allocation = None,
+                                executor_cores = None, executor_memory = None, instance_pool = None,
+                                runtime_version = None, spark_properties = None):
         """Update the staging settings of the environment"""
         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/sparkcompute"
-        body = {
-            "instancePool": instance_pool,
-            "driverCores": driver_cores,
-            "driverMemory": driver_memory,
-            "executorCores": executor_cores,
-            "executorMemory": executor_memory,
-            "dynamicExecutorAllocation": dynamic_executor_allocation,
-            "sparkProperties": spark_properties,
-            "runtimeVersion": runtime_version
-        }
+        body = {}
+        if driver_cores is not None:
+            body['driverCores'] = driver_cores
+        if driver_memory is not None:
+            body['driverMemory'] = driver_memory
+        if dynamic_executor_allocation is not None:
+            body['dynamicExecutorAllocation'] = dynamic_executor_allocation
+        if executor_cores is not None:
+            body['executorCores'] = executor_cores
+        if executor_memory is not None:
+            body['executorMemory'] = executor_memory
+        if instance_pool is not None:
+            body['instancePool'] = instance_pool
+        if runtime_version is not None:
+            body['runtimeVersion'] = runtime_version
+        if spark_properties is not None:
+            body['sparkProperties'] = spark_properties
+
+
         for _ in range(10):
             response = requests.patch(url=url, headers=self.auth.get_headers(), json=body)
             if response.status_code == 429:
@@ -93,14 +87,10 @@ class Environment(Item):
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error updating staging settings: {response.text}")
+                raise Exception(f"Error updating staging settings: {response.status_code}, {response.text}")
             break
 
-        self.staging_sparkcompute = body
-        return body
+        return json.loads(response.text)
 
     # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/libraries
 
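Because the request body is assembled field by field, a call that passes a single keyword produces a minimal PATCH payload. Assuming env is an Environment obtained via Workspace.get_environment:

    # Sends just {"driverCores": 4}; settings that are not passed are
    # omitted from the request and keep their current values.
    env.update_staging_settings(driver_cores=4)
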
@@ -115,14 +105,10 @@ class Environment(Item):
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error getting published libraries: {response.text}")
+                raise Exception(f"Error getting published libraries: {response.status_code}, {response.text}")
             break
 
         resp_json = json.loads(response.text)
-        self.libraries = resp_json
         return resp_json
 
     # GET https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries
@@ -138,19 +124,29 @@ class Environment(Item):
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error getting staging libraries: {response.text}")
+                raise Exception(f"Error getting staging libraries: {response.status_code}, {response.text}")
             break
 
         resp_json = json.loads(response.text)
-        self.staging_libraries = resp_json
         return resp_json
 
 
-    def update_staging_libraries(self):
-        raise NotImplementedError("This method is not implemented yet because the REST API is not complete")
+    def upload_staging_library(self, file_path):
+        # POST https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries
+        raise NotImplementedError("Not implemented yet")
+        # url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.workspace_id}/environments/{self.id}/staging/libraries"
+
+        # for _ in range(10):
+        #     response = requests.post(url=url, files={'file': file_path}, headers=self.auth.get_headers())
+        #     if response.status_code == 429:
+        #         print("Too many requests, waiting 10 seconds")
+        #         sleep(10)
+        #         continue
+        #     if response.status_code not in (200, 429):
+        #         raise Exception(f"Error uploading staging libraries: {response.status_code}, {response.text}")
+        #     break
+
+        # return json.loads(response.text)
 
     # DELETE https://api.fabric.microsoft.com/v1/workspaces/{workspaceId}/environments/{environmentId}/staging/libraries?libraryToDelete={libraryToDelete}
 
@@ -165,10 +161,7 @@ class Environment(Item):
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error deleting staging libraries: {response.text}")
+                raise Exception(f"Error deleting staging libraries: {response.status_code}, {response.text}")
             break
 
         return response.text
@@ -189,10 +182,7 @@ class Environment(Item):
                 publish_info = check_long_running_operation(response.headers, self.auth)
                 return publish_info
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error publishing staging: {response.text}")
+                raise Exception(f"Error publishing staging: {response.status_code}, {response.text}")
             break
 
         resp_dict = json.loads(response.text)
@@ -212,10 +202,7 @@ class Environment(Item):
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error canceling publishing: {response.text}")
+                raise Exception(f"Error canceling publishing: {response.status_code}, {response.text}")
             break
 
         resp_dict = json.loads(response.text)
msfabricpysdkcore/item.py CHANGED
@@ -58,10 +58,7 @@ class Item:
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                print(self)
-                raise Exception(f"Error deleting item: {response.text}")
+                raise Exception(f"Error deleting item: {response.status_code}, {response.text}")
             break
 
         return response.status_code
msfabricpysdkcore/otheritems.py CHANGED
@@ -3,6 +3,38 @@ from time import sleep
 import requests
 from msfabricpysdkcore.item import Item
 
+
+class Eventhouse(Item):
+    """Class to represent a eventhouse in Microsoft Fabric"""
+
+    def __init__(self, id, display_name, type, workspace_id, auth, properties = None, description=""):
+        super().__init__(id = id, display_name=display_name, type=type,
+                         workspace_id=workspace_id, auth=auth, properties=properties,
+                         description=description)
+
+    def from_dict(item_dict, auth):
+        if "displayName" not in item_dict:
+            item_dict["displayName"] = item_dict["display_name"]
+        if "workspaceId" not in item_dict:
+            item_dict["workspaceId"] = item_dict["workspace_id"]
+
+        return Eventhouse(id=item_dict['id'], display_name=item_dict['displayName'],
+                          type=item_dict['type'], workspace_id=item_dict['workspaceId'],
+                          properties=item_dict.get('properties', None),
+                          description=item_dict.get('description', ""), auth=auth)
+
+    def create_kql_database(self, display_name = None, description= None):
+        from msfabricpysdkcore.coreapi import FabricClientCore
+        """Method to create a kql database in the eventhouse"""
+        creation_payload = {"databaseType" : "ReadWrite",
+                            "parentEventhouseItemId" : self.id}
+
+        fcc = FabricClientCore(silent=True)
+
+        return fcc.create_kql_database(workspace_id = self.workspace_id,
+                                       display_name = display_name, description = description,
+                                       creation_payload= creation_payload)
+
 class SparkJobDefinition(Item):
     """Class to represent a spark job definition in Microsoft Fabric"""
 
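Eventhouse.create_kql_database builds the ReadWrite creation payload from its own id and delegates to a freshly constructed FabricClientCore; the import happens inside the method to avoid a circular import with coreapi, and silent=True keeps that helper client from printing an authentication banner. Usage sketch with placeholder names:

    eh = fc.get_eventhouse("<workspace-id>", eventhouse_name="eventhouse1")
    kqldb = eh.create_kql_database(display_name="kqldatabase1",
                                   description="created from the eventhouse")
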
msfabricpysdkcore/tests/test_datapipelines.py ADDED
@@ -0,0 +1,48 @@
+import unittest
+from datetime import datetime
+from dotenv import load_dotenv
+from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        #load_dotenv()
+        self.fc = FabricClientCore()
+
+    def test_data_pipelines(self):
+
+        fc = self.fc
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
+
+        dps = fc.list_data_pipelines(workspace_id)
+        dp_names = [dp.display_name for dp in dps]
+        self.assertGreater(len(dps), 0)
+        self.assertIn("pipeline1", dp_names)
+
+        dp = fc.get_data_pipeline(workspace_id, data_pipeline_name="pipeline1")
+        self.assertIsNotNone(dp.id)
+        self.assertIsNotNone(dp.definition)
+        self.assertEqual(dp.display_name, "pipeline1")
+
+        dp_new = fc.create_data_pipeline(workspace_id, display_name="pipeline_new", description="asda")
+        dp_new.update_definition(dp.definition)
+
+        self.assertEqual(dp_new.display_name, "pipeline_new")
+
+        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2")
+
+        dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
+        self.assertEqual(dp.display_name, "pipeline2")
+        self.assertEqual(dp.id, dp2.id)
+
+        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline1")
+
+        dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
+        self.assertEqual(dp.display_name, "pipeline1")
+        self.assertEqual(dp.id, dp2.id)
+        status_code = fc.delete_data_pipeline(workspace_id, dp_new.id)
+        self.assertEqual(status_code, 200)
msfabricpysdkcore/tests/test_environments.py CHANGED
@@ -10,39 +10,56 @@ class TestFabricClientCore(unittest.TestCase):
 
     def __init__(self, *args, **kwargs):
         super(TestFabricClientCore, self).__init__(*args, **kwargs)
-        #load_dotenv()
         self.fc = FabricClientCore()
-        self.workspace_id = "d8a5abe0-9eed-406d-ab46-343bc57ddbe5"
 
-
-    def test_environments(self):
+    def test_environments_crudl(self):
         fc = self.fc
-        workspace_id = self.workspace_id
-
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        envname = "testitem" + datetime_str
-        env1 = fc.create_environment(workspace_id, envname, "testenv description")
-
-        env2 = fc.get_environment(workspace_id, environment_name=envname)
-
-        self.assertEqual(env1.id, env2.id)
-
-        env_list = fc.list_environments(workspace_id, with_properties=True)
-        env_names = [env.display_name for env in env_list]
-
-        self.assertIn(envname, env_names)
-
-        env3 = fc.update_environment(workspace_id, env1.id, envname + "name", "testenv description updated")
-
-        env4 = fc.get_environment(workspace_id, environment_name=env3.display_name)
-
-        self.assertEqual(env1.id, env4.id)
-
-        fc.delete_environment(workspace_id, env4.id)
-
-        env_list = fc.list_environments(workspace_id, with_properties=True)
-        env_names = [env.display_name for env in env_list]
-
-        self.assertNotIn(env4.display_name, env_names)
 
-
+        env_name = "env" + datetime_str
+        environment1 = fc.create_environment(workspace_id, display_name=env_name)
+        self.assertEqual(environment1.display_name, env_name)
+
+        environments = fc.list_environments(workspace_id)
+        environment_names = [env.display_name for env in environments]
+        self.assertGreater(len(environments), 0)
+        self.assertIn(env_name, environment_names)
+
+        env = fc.get_environment(workspace_id, environment_name=env_name)
+        self.assertIsNotNone(env.id)
+        self.assertEqual(env.display_name, env_name)
+        new_name = env_name + "2"
+        env2 = fc.update_environment(workspace_id, env.id, display_name=new_name)
+
+        env = fc.get_environment(workspace_id, environment_id=env.id)
+        self.assertEqual(env.display_name, new_name)
+        self.assertEqual(env.id, env2.id)
+
+        status_code = fc.delete_environment(workspace_id, env.id)
+        self.assertEqual(status_code, 200)
+
+    def test_environment_details(self):
+        fc = FabricClientCore()
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
+        environment_id = 'fae6d1a7-d671-4091-89b1-f42626deb56f'
+        published_settings = fc.get_published_settings(workspace_id=workspace_id, environment_id=environment_id)
+        self.assertIsNotNone(published_settings)
+        self.assertIn("instancePool", published_settings)
+        self.assertIn("dynamicExecutorAllocation", published_settings)
+        staging_settings = fc.get_staging_settings(workspace_id=workspace_id, environment_id=environment_id)
+        self.assertIsNotNone(staging_settings)
+        self.assertIn("instancePool", staging_settings)
+        self.assertIn("dynamicExecutorAllocation", staging_settings)
+        if staging_settings["driverCores"] == 8:
+            driver_cores = 4
+        else:
+            driver_cores = 8
+        updated_settings = fc.update_staging_settings(workspace_id=workspace_id, environment_id=environment_id, driver_cores=driver_cores)
+        self.assertIn("instancePool", updated_settings)
+        self.assertIn("dynamicExecutorAllocation", updated_settings)
+        self.assertEqual(updated_settings["driverCores"], driver_cores)
+        updated_settings = fc.get_staging_settings(workspace_id=workspace_id, environment_id=environment_id)
+        self.assertIn("instancePool", updated_settings)
+        self.assertIn("dynamicExecutorAllocation", updated_settings)
+        self.assertEqual(updated_settings["driverCores"], driver_cores)
msfabricpysdkcore/tests/test_evenstreams.py ADDED
@@ -0,0 +1,44 @@
+import unittest
+from datetime import datetime
+from dotenv import load_dotenv
+from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        #load_dotenv()
+        self.fc = FabricClientCore()
+
+    def test_eventstreams(self):
+
+        fc = self.fc
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
+
+        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        es_name = "es" + datetime_str
+
+        eventstream = fc.create_eventstream(workspace_id, display_name=es_name)
+        self.assertEqual(eventstream.display_name, es_name)
+
+        eventstreams = fc.list_eventstreams(workspace_id)
+        eventstream_names = [es.display_name for es in eventstreams]
+        self.assertGreater(len(eventstreams), 0)
+        self.assertIn(es_name, eventstream_names)
+
+
+        es = fc.get_eventstream(workspace_id, eventstream_name=es_name)
+        self.assertIsNotNone(es.id)
+        self.assertEqual(es.display_name, es_name)
+
+        es2 = fc.update_eventstream(workspace_id, es.id, display_name=f"{es_name}2")
+
+        es = fc.get_eventstream(workspace_id, eventstream_id=es.id)
+        self.assertEqual(es.display_name, f"{es_name}2")
+        self.assertEqual(es.id, es2.id)
+
+        status_code = fc.delete_eventstream(workspace_id, es.id)
+        self.assertEqual(status_code, 200)
msfabricpysdkcore/tests/test_items_incl_lakehouse.py CHANGED
@@ -78,8 +78,7 @@ class TestFabricClientCore(unittest.TestCase):
 
         fc = self.fc
 
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         list_dashboards = fc.list_dashboards(workspace_id)
         dashboard_names = [dashboard.display_name for dashboard in list_dashboards]
@@ -144,8 +143,7 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertIn(table_name, table_names)
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name="lakehouse2")
         self.assertIsNotNone(lakehouse.id)
@@ -171,116 +169,37 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertEqual(status_code, 200)
 
 
-    def test_data_pipelines(self):
-
-        fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
-
-        dps = fc.list_data_pipelines(workspace_id)
-        dp_names = [dp.display_name for dp in dps]
-        self.assertGreater(len(dps), 0)
-        self.assertIn("pipeline1", dp_names)
-
-        dp = fc.get_data_pipeline(workspace_id, data_pipeline_name="pipeline1")
-        self.assertIsNotNone(dp.id)
-        self.assertIsNotNone(dp.definition)
-        self.assertEqual(dp.display_name, "pipeline1")
-
-        dp_new = fc.create_data_pipeline(workspace_id, display_name="pipeline_new", description="asda")
-        dp_new.update_definition(dp.definition)
-
-        self.assertEqual(dp_new.display_name, "pipeline_new")
-
-        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline2")
-
-        dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
-        self.assertEqual(dp.display_name, "pipeline2")
-        self.assertEqual(dp.id, dp2.id)
-
-        dp2 = fc.update_data_pipeline(workspace_id, dp.id, display_name="pipeline1")
-
-        dp = fc.get_data_pipeline(workspace_id, data_pipeline_id=dp.id)
-        self.assertEqual(dp.display_name, "pipeline1")
-        self.assertEqual(dp.id, dp2.id)
-        status_code = fc.delete_data_pipeline(workspace_id, dp_new.id)
-        self.assertEqual(status_code, 200)
-
-    def test_eventstreams(self):
-
+    def test_eventhouses(self):
+
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
-
-        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
-        es_name = "es" + datetime_str
-
-        eventstream = fc.create_eventstream(workspace_id, display_name=es_name)
-        self.assertEqual(eventstream.display_name, es_name)
-
-        eventstreams = fc.list_eventstreams(workspace_id)
-        eventstream_names = [es.display_name for es in eventstreams]
-        self.assertGreater(len(eventstreams), 0)
-        self.assertIn(es_name, eventstream_names)
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
+        eventhouse1 = fc.create_eventhouse(workspace_id, display_name="eventhouse1")
+        self.assertEqual(eventhouse1.display_name, "eventhouse1")
 
-        es = fc.get_eventstream(workspace_id, eventstream_name=es_name)
-        self.assertIsNotNone(es.id)
-        self.assertEqual(es.display_name, es_name)
-
-        es2 = fc.update_eventstream(workspace_id, es.id, display_name=f"{es_name}2")
-
-        es = fc.get_eventstream(workspace_id, eventstream_id=es.id)
-        self.assertEqual(es.display_name, f"{es_name}2")
-        self.assertEqual(es.id, es2.id)
-
-        status_code = fc.delete_eventstream(workspace_id, es.id)
-        self.assertEqual(status_code, 200)
-
-    def test_kql_database(self):
+        eventhouses = fc.list_eventhouses(workspace_id)
+        eventhouse_names = [eh.display_name for eh in eventhouses]
+        self.assertGreater(len(eventhouses), 0)
+        self.assertIn("eventhouse1", eventhouse_names)
 
-        fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        eh = fc.get_eventhouse(workspace_id, eventhouse_name="eventhouse1")
+        self.assertIsNotNone(eh.id)
+        self.assertEqual(eh.display_name, "eventhouse1")
 
-        kql_databases = fc.list_kql_databases(workspace_id)
-        kql_database_names = [kqldb.display_name for kqldb in kql_databases]
-        self.assertGreater(len(kql_databases), 0)
-        self.assertIn("kqldatabase1", kql_database_names)
+        eh2 = fc.update_eventhouse(workspace_id, eh.id, display_name="eventhouse2")
 
-        kqldb = fc.get_kql_database(workspace_id, kql_database_name="kqldatabase1")
-        self.assertIsNotNone(kqldb.id)
-        self.assertEqual(kqldb.display_name, "kqldatabase1")
-
-        kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldb2")
+        eh = fc.get_eventhouse(workspace_id, eventhouse_id=eh.id)
+        self.assertEqual(eh.display_name, "eventhouse2")
+        self.assertEqual(eh.id, eh2.id)
 
-        kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
-        self.assertEqual(kqldb.display_name, "kqldb2")
-        self.assertEqual(kqldb.id, kqldb2.id)
-
-        # retry 3 times
-        i = 0
-        while i < 3:
-            try:
-                kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldatabase1")
-                break
-            except Exception as e:
-                sleep(10)
-                i += 1
-
-
+        status_code = fc.delete_eventhouse(workspace_id, eh.id)
+        self.assertEqual(status_code, 200)
 
-        kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
-        self.assertEqual(kqldb.display_name, "kqldatabase1")
-        self.assertEqual(kqldb.id, kqldb2.id)
-        # status_code = fc.delete_kql_database(workspace_id, kqldb.id)
-        # self.assertEqual(status_code, 200)
 
     def test_kql_querysets(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         kql_queryset_name = "kqlqueryset1"
 
@@ -312,8 +231,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_ml_experiments(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         ml_experiment = fc.create_ml_experiment(workspace_id, display_name="mlexperiment1")
         self.assertEqual(ml_experiment.display_name, "mlexperiment1")
@@ -339,8 +257,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_ml_models(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         model_name = "mlm" + datetime_str
 
@@ -368,8 +285,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_notebooks(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         notebook_name = "notebook125"
 
@@ -407,8 +323,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_reports(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         report_name = "report1234"
 
@@ -441,8 +356,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_semantic_models(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         semantic_model_name = "semanticmodel1234"
 
@@ -475,8 +389,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_spark_job_definitions(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         spark_job_definition_name = f"sjd{datetime_str}"
@@ -514,8 +427,7 @@ class TestFabricClientCore(unittest.TestCase):
     def test_warehouses(self):
 
         fc = self.fc
-        workspace = fc.get_workspace_by_name("testitems")
-        workspace_id = workspace.id
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
 
         datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
         warehouse1 = f"wh{datetime_str}"
msfabricpysdkcore/tests/test_kqldatabases.py ADDED
@@ -0,0 +1,48 @@
+import unittest
+from datetime import datetime
+from dotenv import load_dotenv
+from time import sleep
+from msfabricpysdkcore.coreapi import FabricClientCore
+
+load_dotenv()
+
+class TestFabricClientCore(unittest.TestCase):
+
+    def __init__(self, *args, **kwargs):
+        super(TestFabricClientCore, self).__init__(*args, **kwargs)
+        #load_dotenv()
+        self.fc = FabricClientCore()
+
+    def test_kql_database(self):
+
+        fc = self.fc
+        workspace_id = 'd8a5abe0-9eed-406d-ab46-343bc57ddbe5'
+        evenhouse_id = "14822d45-5460-4efa-9b30-8628510a9197"
+
+        creation_payload = {"databaseType" : "ReadWrite",
+                            "parentEventhouseItemId" : evenhouse_id}
+
+        datetime_str = datetime.now().strftime("%Y%m%d%H%M%S")
+        kqldb_name = "kql" + datetime_str
+        kqldb = fc.create_kql_database(workspace_id = workspace_id, display_name=kqldb_name,
+                                       creation_payload=creation_payload)
+        self.assertEqual(kqldb.display_name, kqldb_name)
+
+        kql_databases = fc.list_kql_databases(workspace_id)
+        kql_database_names = [kqldb.display_name for kqldb in kql_databases]
+        self.assertGreater(len(kql_databases), 0)
+        self.assertIn(kqldb_name, kql_database_names)
+
+        kqldb = fc.get_kql_database(workspace_id, kql_database_name=kqldb_name)
+        self.assertIsNotNone(kqldb.id)
+        self.assertEqual(kqldb.display_name, kqldb_name)
+
+        new_name = kqldb_name+"2"
+        kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name=new_name)
+
+        kqldb = fc.get_kql_database(workspace_id, kql_database_id=kqldb.id)
+        self.assertEqual(kqldb.display_name, new_name)
+        self.assertEqual(kqldb.id, kqldb2.id)
+
+        status_code = fc.delete_kql_database(workspace_id, kqldb.id)
+        self.assertEqual(status_code, 200)
msfabricpysdkcore/tests/test_workspaces_capacities.py CHANGED
@@ -105,12 +105,12 @@ class TestFabricClientCore(unittest.TestCase):
 
         # def test_update_workspace(self):
         ws_updated = self.fc.update_workspace(workspace_id=ws.id,
-                                              display_name="newname8912389u1293",
+                                              display_name="newn912389u8293",
                                               description="new description")
-        self.assertEqual(ws_updated.display_name, "newname8912389u1293")
+        self.assertEqual(ws_updated.display_name, "newn912389u8293")
         self.assertEqual(ws_updated.description, "new description")
         ws = self.fc.get_workspace_by_id(id = ws.id)
-        self.assertEqual(ws.display_name, "newname8912389u1293")
+        self.assertEqual(ws.display_name, "newn912389u8293")
        self.assertEqual(ws.description, "new description")
 
         # def test_unassign_from_capacity(self):
@@ -121,7 +121,7 @@ class TestFabricClientCore(unittest.TestCase):
         self.assertEqual(ws.capacity_id, None)
 
         # def test_delete_workspace(self):
-        result_status = self.fc.delete_workspace(display_name="newname8912389u1293")
+        result_status = self.fc.delete_workspace(display_name="newn912389u8293")
         self.assertEqual(result_status, 200)
 
     def test_list_capacities(self):
msfabricpysdkcore/workspace.py CHANGED
@@ -6,7 +6,7 @@ from msfabricpysdkcore.lakehouse import Lakehouse
 from msfabricpysdkcore.environment import Environment
 from msfabricpysdkcore.long_running_operation import check_long_running_operation
 from msfabricpysdkcore.otheritems import DataPipeline, Eventstream, KQLDatabase, KQLQueryset, SparkJobDefinition
-from msfabricpysdkcore.otheritems import MLExperiment, MLModel, Notebook, Report, SemanticModel, Warehouse
+from msfabricpysdkcore.otheritems import Eventhouse, MLExperiment, MLModel, Notebook, Report, SemanticModel, Warehouse
 from msfabricpysdkcore.spark_custom_pool import SparkCustomPool
 
 
@@ -140,9 +140,7 @@ class Workspace:
                 sleep(10)
                 continue
             if response.status_code not in (200, 429):
-                print(response.status_code)
-                print(response.text)
-                raise Exception(f"Error updating workspace: {response.text}")
+                raise Exception(f"Error updating workspace: {response.status_code}, {response.text}")
             break
 
         assert response.status_code == 200
@@ -229,6 +227,8 @@ class Workspace:
             return self.get_data_pipeline(item_dict["id"])
         if item_dict["type"] == "Eventstream":
             return self.get_eventstream(item_dict["id"])
+        if item_dict["type"] == "Eventhouse":
+            return self.get_eventhouse(item_dict["id"])
         if item_dict["type"] == "KQLDatabase":
             return self.get_kql_database(item_dict["id"])
         if item_dict["type"] == "KQLQueryset":
@@ -255,7 +255,7 @@ class Workspace:
         item_obj = Item.from_dict(item_dict, auth=self.auth)
         return item_obj
 
-    def create_item(self, display_name, type, definition = None, description = None):
+    def create_item(self, display_name, type, definition = None, description = None, **kwargs):
         """Create an item in a workspace"""
 
         url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/items"
@@ -271,7 +271,9 @@ class Workspace:
 
         if type in ["dataPipelines",
                     "environments",
-                    "eventstreams",
+                    "eventhouses",
+                    "eventstreams",
+                    "kqlDatabases",
                     "lakehouses",
                     "mlExperiments",
                     "mlModels",
@@ -281,6 +283,11 @@ class Workspace:
                     "sparkJobDefinitions",
                     "warehouses"]:
 
+            if type == "kqlDatabases":
+                if "creation_payload" not in kwargs:
+                    raise Exception("creation_payload is required for KQLDatabase")
+                body["creationPayload"] = kwargs["creation_payload"]
+
             url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/{type}"
             body.pop('type')
 
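For kqlDatabases, create_item enforces the extra payload at the lowest level: the creation_payload keyword argument is mandatory and is forwarded as creationPayload in the request body. A sketch, assuming ws is a Workspace object:

    ws.create_item(display_name="kqldatabase1",
                   type="kqlDatabases",
                   creation_payload={"databaseType": "ReadWrite",
                                     "parentEventhouseItemId": "<eventhouse-id>"})
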
@@ -307,7 +314,9 @@ class Workspace:
 
         type_mapping = {"dataPipelines": "DataPipeline",
                         "environments": "Environment",
+                        "eventhouses": "Eventhouse",
                         "eventstreams": "Eventstream",
+                        "kqlDatabases": "KQLDatabase",
                         "lakehouses": "Lakehouse",
                         "mlExperiments": "MLExperiment",
                         "mlModels": "MLModel",
@@ -693,7 +702,7 @@ class Workspace:
 
     def list_data_pipelines(self, with_properties = False):
         """List data pipelines in a workspace"""
-        return self.list_items(with_properties = with_properties, type="dataPipelines")
+        return self.list_items(type="dataPipelines", with_properties=with_properties)
 
     def get_data_pipeline(self, data_pipeline_id = None, data_pipeline_name = None):
         """Get a data pipeline from a workspace"""
@@ -721,7 +730,7 @@ class Workspace:
 
     def list_environments(self, with_properties = False):
         """List environments in a workspace"""
-        return self.list_items(type="environments", with_properties=with_properties)
+        return self.list_items(type="environments", with_properties = with_properties)
 
     def create_environment(self, display_name, description = None):
         """Create an environment in a workspace"""
@@ -741,10 +750,6 @@ class Workspace:
 
         item_dict = self.get_item_internal(url)
         env = Environment.from_dict(item_dict, auth=self.auth)
-        #env.get_staging_settings()
-        #env.get_published_settings()
-        #env.get_staging_libraries()
-        #env.get_published_libraries()
         return env
 
     def delete_environment(self, environment_id):
@@ -765,13 +770,18 @@ class Workspace:
 
     def get_staging_settings(self, environment_id):
         return self.get_environment(environment_id).get_staging_settings()
-    def update_staging_settings(self, environment_id, instance_pool, driver_cores, driver_memory, executor_cores, executor_memory,
-                                dynamic_executor_allocation, spark_properties, runtime_version):
-        return self.get_environment(environment_id).update_staging_settings(instance_pool, driver_cores,
-                                                                            driver_memory, executor_cores,
-                                                                            executor_memory,
-                                                                            dynamic_executor_allocation,
-                                                                            spark_properties, runtime_version)
+    def update_staging_settings(self, environment_id,
+                                driver_cores = None, driver_memory = None, dynamic_executor_allocation = None,
+                                executor_cores = None, executor_memory = None, instance_pool = None,
+                                runtime_version = None, spark_properties = None):
+        return self.get_environment(environment_id).update_staging_settings(driver_cores=driver_cores,
+                                                                            driver_memory=driver_memory,
+                                                                            dynamic_executor_allocation=dynamic_executor_allocation,
+                                                                            executor_cores=executor_cores,
+                                                                            executor_memory=executor_memory,
+                                                                            instance_pool=instance_pool,
+                                                                            runtime_version=runtime_version,
+                                                                            spark_properties=spark_properties)
 
     # environment spark libraries
 
@@ -781,8 +791,8 @@ class Workspace:
     def get_staging_libraries(self, environment_id):
         return self.get_environment(environment_id).get_staging_libraries()
 
-    def update_staging_library(self, environment_id):
-        return self.get_environment(environment_id).update_staging_libraries()
+    def upload_staging_library(self, environment_id, file_path):
+        return self.get_environment(environment_id).upload_staging_library(file_path)
 
     def publish_environment(self, environment_id):
         return self.get_environment(environment_id).publish_environment()
@@ -793,11 +803,45 @@ class Workspace:
     def cancel_publish(self, environment_id):
         return self.get_environment(environment_id).cancel_publish()
 
+    # eventhouses
+    def list_eventhouses(self, with_properties = False):
+        """List eventhouses in a workspace"""
+        return self.list_items(type="eventhouses", with_properties=with_properties)
+
+    def create_eventhouse(self, display_name, description = None):
+        """Create an eventhouse in a workspace"""
+        return self.create_item(display_name = display_name,
+                                type = "eventhouses",
+                                definition = None,
+                                description = description)
+
+    def get_eventhouse(self, eventhouse_id = None, eventhouse_name = None):
+        """Get an eventhouse from a workspace"""
+        if eventhouse_id is None and eventhouse_name is not None:
+            return self.get_item_by_name(eventhouse_name, "Eventhouse")
+        elif eventhouse_id is None:
+            raise Exception("eventhouse_id or the eventhouse_name is required")
+
+        url = f"https://api.fabric.microsoft.com/v1/workspaces/{self.id}/eventhouses/{eventhouse_id}"
+
+        item_dict = self.get_item_internal(url)
+        return Eventhouse.from_dict(item_dict, auth=self.auth)
+
+    def delete_eventhouse(self, eventhouse_id):
+        """Delete an eventhouse from a workspace"""
+        return self.get_item(item_id=eventhouse_id).delete(type="eventhouses")
+
+    def update_eventhouse(self, eventhouse_id, display_name = None, description = None):
+        """Update an eventhouse in a workspace"""
+        return self.get_item(item_id=eventhouse_id).update(display_name=display_name,
+                                                           description=description,
+                                                           type="eventhouses")
+
     # eventstreams
 
-    def list_eventstreams(self):
+    def list_eventstreams(self, with_properties = False):
         """List eventstreams in a workspace"""
-        return self.list_items(type="eventstreams")
+        return self.list_items(type="eventstreams", with_properties=with_properties)
 
     def create_eventstream(self, display_name, description = None):
         """Create an eventstream in a workspace"""
@@ -830,9 +874,16 @@ class Workspace:
 
     # kqlDatabases
 
-    def list_kql_databases(self):
+    def list_kql_databases(self, with_properties = False):
         """List kql databases in a workspace"""
-        return self.list_items(type="kqlDatabases")
+        return self.list_items(type="kqlDatabases", with_properties = with_properties)
+
+    def create_kql_database(self, creation_payload, display_name, description = None, ):
+        """Create a kql database in a workspace"""
+        return self.create_item(display_name = display_name,
+                                type = "kqlDatabases",
+                                description = description,
+                                creation_payload = creation_payload)
 
     def get_kql_database(self, kql_database_id = None, kql_database_name = None):
         """Get a kql database from a workspace"""
@@ -859,9 +910,9 @@ class Workspace:
 
     # kqlQuerysets
 
-    def list_kql_querysets(self):
+    def list_kql_querysets(self, with_properties = False):
         """List kql querysets in a workspace"""
-        return self.list_items(type="kqlQuerysets")
+        return self.list_items(type="kqlQuerysets", with_properties = with_properties)
 
     def get_kql_queryset(self, kql_queryset_id = None, kql_queryset_name = None):
         """Get a kql queryset from a workspace"""
@@ -925,9 +976,9 @@ class Workspace:
 
     # mlExperiments
 
-    def list_ml_experiments(self):
+    def list_ml_experiments(self, with_properties = False):
         """List ml experiments in a workspace"""
-        return self.list_items(type="mlExperiments")
+        return self.list_items(type="mlExperiments", with_properties = with_properties)
 
     def create_ml_experiment(self, display_name, description = None):
         """Create an ml experiment in a workspace"""
@@ -960,9 +1011,9 @@ class Workspace:
 
     # mlModels
 
-    def list_ml_models(self):
+    def list_ml_models(self, with_properties = False):
         """List ml models in a workspace"""
-        return self.list_items(type="mlModels")
+        return self.list_items(type="mlModels", with_properties = with_properties)
 
     def create_ml_model(self, display_name, description = None):
         """Create an ml model in a workspace"""
{msfabricpysdkcore-0.0.10.dist-info → msfabricpysdkcore-0.0.11.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: msfabricpysdkcore
-Version: 0.0.10
+Version: 0.0.11
 Summary: A Python SDK for Microsoft Fabric
 Author: Andreas Rederer
 Project-URL: Homepage, https://github.com/DaSenf1860/ms-fabric-sdk-core
{msfabricpysdkcore-0.0.10.dist-info → msfabricpysdkcore-0.0.11.dist-info}/RECORD RENAMED
@@ -2,34 +2,37 @@ msfabricpysdkcore/__init__.py,sha256=nh8-lxdMBWYSbQpbRxWkn3ZRpGipmQplTudjskN2l88
 msfabricpysdkcore/admin_item.py,sha256=OGVfMMqAKy80Ukp8TJnpXoRq8WbWX7Qq1hz7tmfiUBU,4020
 msfabricpysdkcore/admin_workspace.py,sha256=RctVsXZ57ppiybua69BW_yVpUbrGic6fYOjFPZFPAzA,4862
 msfabricpysdkcore/adminapi.py,sha256=uithaYq_HWDU-9Et_EFBNQfrjcjnI-NW6OCmWoAsZxk,25649
-msfabricpysdkcore/auth.py,sha256=zOU3viJwwYZMefS2yWJZ_LaOePJY8mRRrACIlp0yRkw,2430
+msfabricpysdkcore/auth.py,sha256=v5YkI4jA6T7lv5rjqTK-GEPx2ATDPKQ1LVcaCg98oLM,2562
 msfabricpysdkcore/capacity.py,sha256=Q_2-XrZtdf9F67fY0qU3D0ocEOGQq4KtIXAv9dXjQhI,1761
-msfabricpysdkcore/client.py,sha256=W8J5VR19e6N__ycLvFALo-IHWDznGr5s7YD6e9jC4bE,1197
-msfabricpysdkcore/coreapi.py,sha256=TKnxpki4gKmySF4LP8qwveZcZXvbeVWkt0lTcg8hXDI,46168
+msfabricpysdkcore/client.py,sha256=yOZ1CW2ilL8a7J_SRNa-EQbG8xNlJKW3ygniIrsADsw,1300
+msfabricpysdkcore/coreapi.py,sha256=Mjs_YoCISmQXhG7mO4KDWd-4GKZJ7M4hsgis3cee-no,48590
 msfabricpysdkcore/deployment_pipeline.py,sha256=RFI86rtG-eTpV-_tVl3cXtcTl9ekRvOI5fLsXo9CMVA,9739
 msfabricpysdkcore/domain.py,sha256=i8jMJEutDRL5XuQ69woCVQEzLS_lm9uUxl4Kp3xtxHc,14722
-msfabricpysdkcore/environment.py,sha256=nvBNvmgqQMGWRUhZ9oX0z5Xf8IfPqyRr7ufgsiYt6dY,10271
-msfabricpysdkcore/item.py,sha256=Ac24M--C5N5Q0Rny70DVMctRSd8kG3qI-RcvlIWCNrw,11516
+msfabricpysdkcore/environment.py,sha256=ENgy7URz_gJ4E5-OH355QF6vqawBoHjHMbjnoEJfGwM,10072
+msfabricpysdkcore/item.py,sha256=HIr32drqNtlIKKXkXCcWI2H_TZMDalCxvikTyDHNprA,11428
 msfabricpysdkcore/job_instance.py,sha256=C9kKsV-BIJSeU6DfoTnLlg4DLp-8RYpovs0A-mKwi4o,2745
 msfabricpysdkcore/lakehouse.py,sha256=nv95SBz_jsssT5dEw622WqtDHUyh5bFFsVTwiqo8s-c,6055
 msfabricpysdkcore/long_running_operation.py,sha256=gcGVTgA0NToPItrl42Zy1M2eVZtx-vx-SdUryNAm53E,2731
 msfabricpysdkcore/onelakeshortcut.py,sha256=EYZfP-rl60HdCqJD1O1NXrQTgrYTIw-EWisF4hs4bTU,2102
-msfabricpysdkcore/otheritems.py,sha256=6A9PC5ObENeTKE3Pc8klYRxSrx-TwQUMq_P1eLFPF2A,9314
+msfabricpysdkcore/otheritems.py,sha256=TsRZHcqYHw1t6tqKMa3yC0LF9uBj3a1Uzc_lLKZ6LVY,10967
 msfabricpysdkcore/spark_custom_pool.py,sha256=2H-GkGcDsiKxSpXFSf8Zi7xJI5_krb_8sGdF5vDjEy8,4635
-msfabricpysdkcore/workspace.py,sha256=4ZRyRm5pm9aKedURiGhRsh-gm63NxNxmvjDGg0cA0go,61716
+msfabricpysdkcore/workspace.py,sha256=IeT0W0vitc_jH6d9OhRd-SjgDsKYFayQ_g8m9DD0pts,65050
 msfabricpysdkcore/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 msfabricpysdkcore/tests/test_admin_apis.py,sha256=HTXt72p9FiEf3q9SXyq9aImR6zZo_xIREfB0xPaYazU,2586
+msfabricpysdkcore/tests/test_datapipelines.py,sha256=rIeZfv-MGewYYscb36jmjL8DLZ3Z1dJWv09foR9z_y8,1812
 msfabricpysdkcore/tests/test_deployment_pipeline.py,sha256=XmqKZXbwBY3P-4O_ce9Wupz9qXHaKxWERwS-RZt38Bs,2024
 msfabricpysdkcore/tests/test_domains.py,sha256=KFGQyl0G2v4JjX_xYHY-vPSjYxeLEx3AcXMdiRZzClc,4620
-msfabricpysdkcore/tests/test_environments.py,sha256=kuEXk9uKOYOQ5YWrlk6hWQWHm176zFP0AoDz3EOV6Oo,1592
+msfabricpysdkcore/tests/test_environments.py,sha256=zSIMhkQscMB7doJEQRBRMvAEBxO1Kl1N85MKXP3yeYI,3120
+msfabricpysdkcore/tests/test_evenstreams.py,sha256=i6hhsOnDJ-BN7kStg7gQhZVPMVaB73K7KJez6pEKlv0,1628
 msfabricpysdkcore/tests/test_git.py,sha256=IZfWggCyZtHpUbYyacNTyIRfy6rofz-fBEmco_8gMZ4,2415
-msfabricpysdkcore/tests/test_items_incl_lakehouse.py,sha256=5fJZf3qqx9yyPsZ-lv5iaTJWnEuJHL39YdFos4ThEoA,23303
+msfabricpysdkcore/tests/test_items_incl_lakehouse.py,sha256=6to6S9ZcRT6Ain7U3LxCpjkKmL7-Uk3VvJ4xfSkrnKg,19910
 msfabricpysdkcore/tests/test_jobs.py,sha256=DC1nQTav_Re7uunRA07wD_56giLqe9KOsgm56Il8Zr4,1632
+msfabricpysdkcore/tests/test_kqldatabases.py,sha256=Oc2ZzpQAVbiJeZ5ytwlP2Jb-tKe-2e9-1oA-upwRKBQ,1921
 msfabricpysdkcore/tests/test_shortcuts.py,sha256=TqGLzEWsDyiQ0Gf6JpT_qBHUCgcvYXfVwpXxOay7Qz4,2407
 msfabricpysdkcore/tests/test_spark.py,sha256=5BCAgHRiuXjIRnGrbvNNh9emq0VyZXlvIWgWAEir5ZQ,3437
-msfabricpysdkcore/tests/test_workspaces_capacities.py,sha256=2VHPvZ8GnpFStgUoZ-Al3kVVVimjkAs9YG47NsFl-zo,6563
-msfabricpysdkcore-0.0.10.dist-info/LICENSE,sha256=1NrGuF-zOmzbwzk3iI6lsP9koyDeKO1B0-8OD_tTvOQ,1156
-msfabricpysdkcore-0.0.10.dist-info/METADATA,sha256=7EPpvWLRc5tXpMQIdAByGXRkT4dF592UEhwoPqruPXY,17455
-msfabricpysdkcore-0.0.10.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-msfabricpysdkcore-0.0.10.dist-info/top_level.txt,sha256=3iRonu6ptDGQN4Yl6G76XGM7xbFNsskiEHW-P2gMQGY,18
-msfabricpysdkcore-0.0.10.dist-info/RECORD,,
+msfabricpysdkcore/tests/test_workspaces_capacities.py,sha256=X00zIuADhY_7jORIzXrWiXfPBJUhbXUckF-gZJ3m1bQ,6547
+msfabricpysdkcore-0.0.11.dist-info/LICENSE,sha256=1NrGuF-zOmzbwzk3iI6lsP9koyDeKO1B0-8OD_tTvOQ,1156
+msfabricpysdkcore-0.0.11.dist-info/METADATA,sha256=56ie3TOUCJjsXDapUag0-i5-YmcluBQqgRFIgJODj2A,17455
+msfabricpysdkcore-0.0.11.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+msfabricpysdkcore-0.0.11.dist-info/top_level.txt,sha256=3iRonu6ptDGQN4Yl6G76XGM7xbFNsskiEHW-P2gMQGY,18
+msfabricpysdkcore-0.0.11.dist-info/RECORD,,