lexsi-sdk 0.1.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40) hide show
  1. lexsi_sdk/__init__.py +5 -0
  2. lexsi_sdk/client/__init__.py +0 -0
  3. lexsi_sdk/client/client.py +176 -0
  4. lexsi_sdk/common/__init__.py +0 -0
  5. lexsi_sdk/common/config/.env.prod +3 -0
  6. lexsi_sdk/common/constants.py +143 -0
  7. lexsi_sdk/common/enums.py +8 -0
  8. lexsi_sdk/common/environment.py +49 -0
  9. lexsi_sdk/common/monitoring.py +81 -0
  10. lexsi_sdk/common/trigger.py +75 -0
  11. lexsi_sdk/common/types.py +122 -0
  12. lexsi_sdk/common/utils.py +93 -0
  13. lexsi_sdk/common/validation.py +110 -0
  14. lexsi_sdk/common/xai_uris.py +197 -0
  15. lexsi_sdk/core/__init__.py +0 -0
  16. lexsi_sdk/core/agent.py +62 -0
  17. lexsi_sdk/core/alert.py +56 -0
  18. lexsi_sdk/core/case.py +618 -0
  19. lexsi_sdk/core/dashboard.py +131 -0
  20. lexsi_sdk/core/guardrails/__init__.py +0 -0
  21. lexsi_sdk/core/guardrails/guard_template.py +299 -0
  22. lexsi_sdk/core/guardrails/guardrail_autogen.py +554 -0
  23. lexsi_sdk/core/guardrails/guardrails_langgraph.py +525 -0
  24. lexsi_sdk/core/guardrails/guardrails_openai.py +541 -0
  25. lexsi_sdk/core/guardrails/openai_runner.py +1328 -0
  26. lexsi_sdk/core/model_summary.py +110 -0
  27. lexsi_sdk/core/organization.py +549 -0
  28. lexsi_sdk/core/project.py +5131 -0
  29. lexsi_sdk/core/synthetic.py +387 -0
  30. lexsi_sdk/core/text.py +595 -0
  31. lexsi_sdk/core/tracer.py +208 -0
  32. lexsi_sdk/core/utils.py +36 -0
  33. lexsi_sdk/core/workspace.py +325 -0
  34. lexsi_sdk/core/wrapper.py +766 -0
  35. lexsi_sdk/core/xai.py +306 -0
  36. lexsi_sdk/version.py +34 -0
  37. lexsi_sdk-0.1.16.dist-info/METADATA +100 -0
  38. lexsi_sdk-0.1.16.dist-info/RECORD +40 -0
  39. lexsi_sdk-0.1.16.dist-info/WHEEL +5 -0
  40. lexsi_sdk-0.1.16.dist-info/top_level.txt +1 -0
from lexsi_sdk.client.client import APIClient
from lexsi_sdk.common.xai_uris import GET_PROJECT_CONFIG, MODEL_SVG_URI
from typing import Dict, Optional
from pydantic import BaseModel, ConfigDict
import plotly.graph_objects as go
from IPython.display import SVG, display


class ModelSummary(BaseModel):
    """Container for model metadata and visualization helpers."""

    # Project identity and labelling metadata for the trained model.
    project_name: str
    project_type: str
    unique_identifier: str
    true_label: str
    pred_label: Optional[str] = None
    metadata: Dict
    model_results: Dict
    is_automl: bool
    Source: str

    # pydantic v2 reserves the "model_" prefix; disable that so the
    # "model_results" field above is accepted without warnings.
    model_config = ConfigDict(protected_namespaces=())

    api_client: APIClient

    def __init__(self, **kwargs):
        """Store API client reference for subsequent calls."""
        super().__init__(**kwargs)
        self.api_client = kwargs.get("api_client")

    def info(self) -> dict:
        """Model Info

        :return: model info dict with source, name, type, params,
            data tags used for modelling and modelling info
        """
        return {
            "source": self.Source,
            "model_name": self.model_results.get("model_name"),
            "model_type": self.model_results.get("model_type"),
            "model_param": self.model_results.get("model_params"),
            "data_tags_used_for_modelling": self.model_results.get("data_used_tags"),
            "modelling_info": self.model_results.get("modelling_info"),
        }

    def feature_importance(self, xai_method: str):
        """Plot global feature importance as a horizontal bar chart.

        :param xai_method: XAI method to plot ("shap" or "lime")
        :return: message string when no importance data exists for the
            method, otherwise ``None`` (the figure is shown as a side effect)
        """
        global_features = None
        if xai_method == "shap":
            global_features = self.model_results.get("GFI", {}).get("shap_gfi", None)
        if xai_method == "lime":
            global_features = self.model_results.get("GFI", {}).get("lime_gfi", None)
        if not global_features:
            return f"No feature importance found for {xai_method}"

        fig = go.Figure()
        fig.add_trace(
            go.Bar(
                y=list(global_features.keys()),
                x=list(global_features.values()),
                orientation="h",
            )
        )

        fig.update_layout(
            # Fixed typo in the visible chart title ("Global Feaure").
            title="Global Feature",
            xaxis_title="Values",
            yaxis_title="Features",
            width=800,
            height=600,
            # Most important feature at the top of the chart.
            yaxis_autorange="reversed",
            bargap=0.01,
            legend_orientation="h",
            legend_x=0.1,
            legend_y=1.1,
        )

        fig.show(config={"displaylogo": False})

    def prediction_path(self):
        """Fetch the model's prediction-path SVG and display it inline.

        :raises Exception: when the backend reports failure
        """
        model_name = self.model_results.get("model_name")
        res = self.api_client.get(
            f"{MODEL_SVG_URI}?project_name={self.project_name}&model_name={model_name}"
        )

        if not res["success"]:
            raise Exception(res.get("details"))

        svg = SVG(res.get("details"))
        display(svg)

    def data_config(self):
        """returns data config for the project

        Internal bookkeeping fields are stripped from the response
        before it is returned.

        :return: response details (dict) or the string "Not Found"
        """
        model_name = self.model_results.get("model_name")
        res = self.api_client.get(
            f"{GET_PROJECT_CONFIG}?project_name={self.project_name}&model_name={model_name}"
        )
        if res.get("details") != "Not Found":
            # pop with a default so a partially-populated response does
            # not raise KeyError.  NOTE(review): "avaialble_tags" is the
            # key as spelled by the backend — do not "fix" it here.
            res["details"].pop("updated_by", None)
            res["details"]["metadata"].pop("path", None)
            res["details"]["metadata"].pop("avaialble_tags", None)

        return res.get("details")
import pandas as pd
from pydantic import BaseModel
from typing import Dict, List, Optional
from lexsi_sdk.client.client import APIClient
from lexsi_sdk.common.validation import Validate
from lexsi_sdk.common.xai_uris import (
    AVAILABLE_CUSTOM_SERVERS_URI,
    COMPUTE_CREDIT_URI,
    CREATE_DATA_CONNECTORS,
    CREATE_WORKSPACE_URI,
    DELETE_DATA_CONNECTORS,
    DROPBOX_OAUTH,
    GET_WORKSPACES_URI,
    INVITE_USER_ORGANIZATION_URI,
    LIST_BUCKETS,
    LIST_DATA_CONNECTORS,
    LIST_FILEPATHS,
    ORGANIZATION_MEMBERS_URI,
    REMOVE_USER_ORGANIZATION_URI,
    TEST_DATA_CONNECTORS,
    UPDATE_ORGANIZATION_URI,
)
from lexsi_sdk.core.workspace import Workspace
from lexsi_sdk.common.types import GCSConfig, S3Config, GDriveConfig, SFTPConfig
from lexsi_sdk.core.utils import build_url, build_list_data_connector_url


class Organization(BaseModel):
    """Class to work with Lexsi ai organizations"""

    organization_id: Optional[str] = None
    name: str
    created_by: str
    created_at: Optional[str] = None

    api_client: APIClient

    def __init__(self, **kwargs):
        """Attach API client to the organization instance."""
        super().__init__(**kwargs)
        self.api_client = kwargs.get("api_client")

    def add_user_to_organization(self, user_email: str) -> str:
        """Add user to Organization

        :param user_email: Email of user to be added to organization.
        :return: response
        :raises Exception: when the backend reports failure
        """
        payload = {
            "email": user_email,
            "organization_id": self.organization_id,
        }
        res = self.api_client.post(INVITE_USER_ORGANIZATION_URI, payload)

        if not res["success"]:
            raise Exception(res.get("details", "Failed to add user to organization"))

        return res.get("details", "User added successfully")

    def remove_user_from_organization(self, user_email: str) -> str:
        """Remove user from Organization

        :param user_email: Email of user to be removed from organization.
        :return: response
        :raises Exception: when the backend reports failure
        """
        payload = {
            "organization_user_email": user_email,
            "organization_id": self.organization_id,
        }
        res = self.api_client.post(REMOVE_USER_ORGANIZATION_URI, payload)

        if not res["success"]:
            raise Exception(
                res.get("details", "Failed to remove user from organization")
            )

        return res.get("details", "User removed successfully")

    def member_details(self) -> pd.DataFrame:
        """Organization Member details

        :return: member details dataframe
        :raises Exception: when the backend reports failure
        """
        res = self.api_client.get(
            f"{ORGANIZATION_MEMBERS_URI}?organization_id={self.organization_id}"
        )

        if not res["success"]:
            raise Exception(
                res.get("details", "Failed to get organization member details")
            )

        member_details_df = pd.DataFrame(
            res.get("details").get("users"),
            columns=[
                "full_name",
                "email",
                "organization_owner",
                "organization_admin",
                "created_at",
            ],
        )

        return member_details_df

    def workspaces(self) -> pd.DataFrame:
        """get user workspaces

        :return: workspace details dataframe
        """
        url = GET_WORKSPACES_URI
        if self.organization_id:
            url = url + f"?organization_id={self.organization_id}"
        workspaces = self.api_client.get(url)

        workspace_df = pd.DataFrame(
            workspaces["details"],
            columns=[
                "user_workspace_name",
                "access_type",
                "created_by",
                "created_at",
                "updated_at",
                "instance_type",
                "instance_status",
            ],
        )

        return workspace_df

    def workspace(self, workspace_name: str) -> Workspace:
        """select specific workspace

        :param workspace_name: Name of the workspace to be used
        :return: Workspace
        :raises Exception: when no workspace with that name exists
        """
        url = GET_WORKSPACES_URI
        if self.organization_id:
            url = url + f"?organization_id={self.organization_id}"
        workspaces = self.api_client.get(url)
        user_workspaces = [
            Workspace(api_client=self.api_client, **workspace)
            for workspace in workspaces["details"]
        ]

        workspace = next(
            filter(
                lambda workspace: workspace.user_workspace_name == workspace_name,
                user_workspaces,
            ),
            None,
        )

        if workspace is None:
            raise Exception("Workspace Not Found")

        return workspace

    def create_workspace(
        self, workspace_name: str, server_type: Optional[str] = None
    ) -> Workspace:
        """create user workspace

        :param workspace_name: name for the workspace
        :param server_type: dedicated instance to run workloads
            for all available instances check xai.available_custom_servers()
            defaults to shared
        :return: response
        :raises Exception: when the backend reports failure
        """
        payload = {"workspace_name": workspace_name}

        if server_type:
            custom_servers = self.api_client.get(AVAILABLE_CUSTOM_SERVERS_URI)
            Validate.value_against_list(
                "server_type",
                server_type,
                [server["name"] for server in custom_servers],
            )

            payload["instance_type"] = server_type
            payload["server_config"] = {}

        if self.organization_id:
            payload["organization_id"] = self.organization_id

        res = self.api_client.post(CREATE_WORKSPACE_URI, payload)

        if not res["success"]:
            raise Exception(res.get("details"))

        workspace = Workspace(api_client=self.api_client, **res["workspace_details"])

        return workspace

    def __print__(self) -> str:
        """User-friendly string representation."""
        return f"Organization(name='{self.name}', created_by='{self.created_by}', created_at='{self.created_at}')"

    def __str__(self) -> str:
        """Return printable representation."""
        return self.__print__()

    def __repr__(self) -> str:
        """Return developer-friendly representation."""
        return self.__print__()

    def create_data_connectors(
        self,
        data_connector_name: str,
        data_connector_type: str,
        gcs_config: Optional[GCSConfig] = None,
        s3_config: Optional[S3Config] = None,
        gdrive_config: Optional[GDriveConfig] = None,
        sftp_config: Optional[SFTPConfig] = None,
        hf_token: Optional[str] = None,
    ) -> str:
        """Create Data Connectors for project

        :param data_connector_name: str # name for data connector
        :param data_connector_type: str # type of data connector
            (s3 | gcs | gdrive | sftp | dropbox | HuggingFace)
        :param gcs_config: dict # credentials from service account json
        :param s3_config: dict # credentials of s3 storage
        :param gdrive_config: dict # credentials from service account json
        :param sftp_config: dict # hostname, port, username and password for sftp connection
        :param hf_token: str # HuggingFace access token
        :return: response
        """
        if not self.organization_id:
            return "No Organization id found"

        # Set by exactly one of the branches below; stays None when the
        # connector type is not recognised.
        payload = None

        if data_connector_type.lower() == "s3":
            if not s3_config:
                return "No configuration for S3 found"

            Validate.value_against_list(
                "s3 config",
                list(s3_config.keys()),
                ["region", "access_key", "secret_key"],
            )

            payload = {
                "link_service": {
                    "service_name": data_connector_name,
                    "region": s3_config.get("region", "ap-south-1"),
                    "access_key": s3_config.get("access_key"),
                    "secret_key": s3_config.get("secret_key"),
                },
                "link_service_type": data_connector_type,
            }

        if data_connector_type.lower() == "gcs":
            if not gcs_config:
                return "No configuration for GCS found"

            Validate.value_against_list(
                "gcs config",
                list(gcs_config.keys()),
                [
                    "project_id",
                    "gcp_project_name",
                    "type",
                    "private_key_id",
                    "private_key",
                    "client_email",
                    "client_id",
                    "auth_uri",
                    "token_uri",
                ],
            )

            payload = {
                "link_service": {
                    "service_name": data_connector_name,
                    "project_id": gcs_config.get("project_id"),
                    "gcp_project_name": gcs_config.get("gcp_project_name"),
                    "service_account_json": {
                        "type": gcs_config.get("type"),
                        "project_id": gcs_config.get("project_id"),
                        "private_key_id": gcs_config.get("private_key_id"),
                        "private_key": gcs_config.get("private_key"),
                        "client_email": gcs_config.get("client_email"),
                        "client_id": gcs_config.get("client_id"),
                        "auth_uri": gcs_config.get("auth_uri"),
                        "token_uri": gcs_config.get("token_uri"),
                    },
                },
                "link_service_type": data_connector_type,
            }

        if data_connector_type == "gdrive":
            if not gdrive_config:
                return "No configuration for Google Drive found"

            Validate.value_against_list(
                "gdrive config",
                list(gdrive_config.keys()),
                [
                    "project_id",
                    "type",
                    "private_key_id",
                    "private_key",
                    "client_email",
                    "client_id",
                    "auth_uri",
                    "token_uri",
                ],
            )

            payload = {
                "link_service": {
                    "service_name": data_connector_name,
                    "service_account_json": {
                        "type": gdrive_config.get("type"),
                        "project_id": gdrive_config.get("project_id"),
                        "private_key_id": gdrive_config.get("private_key_id"),
                        "private_key": gdrive_config.get("private_key"),
                        "client_email": gdrive_config.get("client_email"),
                        "client_id": gdrive_config.get("client_id"),
                        "auth_uri": gdrive_config.get("auth_uri"),
                        "token_uri": gdrive_config.get("token_uri"),
                    },
                },
                "link_service_type": data_connector_type,
            }

        if data_connector_type == "sftp":
            if not sftp_config:
                # Fixed copy-paste error: message previously said Google Drive.
                return "No configuration for SFTP found"

            Validate.value_against_list(
                "sftp config",
                list(sftp_config.keys()),
                ["hostname", "port", "username", "password"],
            )

            payload = {
                "link_service": {
                    "service_name": data_connector_name,
                    "sftp_json": {
                        "hostname": sftp_config.get("hostname"),
                        "port": sftp_config.get("port"),
                        "username": sftp_config.get("username"),
                        "password": sftp_config.get("password"),
                    },
                },
                "link_service_type": data_connector_type,
            }

        if data_connector_type == "dropbox":
            # Interactive OAuth flow: print the authorization URL and wait
            # for the user to paste the code back.
            url_data = self.api_client.get(
                f"{DROPBOX_OAUTH}?organization_id={self.organization_id}"
            )
            print(f"Url: {url_data['details']['url']}")
            code = input(f"{url_data['details']['message']}: ")

            if not code:
                return "No authentication code provided."

            payload = {
                "link_service": {
                    "service_name": data_connector_name,
                    "dropbox_json": {"code": code},
                },
                "link_service_type": data_connector_type,
            }

        if data_connector_type == "HuggingFace":
            if not hf_token:
                return "No hf_token provided"

            payload = {
                "link_service": {
                    "service_name": data_connector_name,
                    "hf_token": hf_token,
                },
                "link_service_type": data_connector_type,
            }

        # Guard against an unrecognised connector type; previously this
        # fell through and raised NameError on the unbound payload.
        if payload is None:
            return f"Unsupported data_connector_type: {data_connector_type}"

        url = build_url(
            CREATE_DATA_CONNECTORS, data_connector_name, None, self.organization_id
        )
        res = self.api_client.post(url, payload)
        return res["details"]

    def test_data_connectors(self, data_connector_name) -> str:
        """Test connection for the data connectors

        :param data_connector_name: str
        :return: response
        """
        if not data_connector_name:
            return "Missing argument data_connector_name"
        if not self.organization_id:
            return "No Project Name or Organization id found"
        url = build_url(
            TEST_DATA_CONNECTORS, data_connector_name, None, self.organization_id
        )
        res = self.api_client.post(url)
        return res["details"]

    def delete_data_connectors(self, data_connector_name) -> str:
        """Delete the data connectors

        :param data_connector_name: str
        :return: response
        """
        if not data_connector_name:
            return "Missing argument data_connector_name"
        if not self.organization_id:
            return "No Project Name or Organization id found"

        url = build_url(
            DELETE_DATA_CONNECTORS, data_connector_name, None, self.organization_id
        )
        res = self.api_client.post(url)
        return res["details"]

    def list_data_connectors(self) -> str | pd.DataFrame:
        """List the data connectors

        :return: dataframe of connectors on success, otherwise the
            backend's details string
        """
        url = build_list_data_connector_url(
            LIST_DATA_CONNECTORS, None, self.organization_id
        )
        res = self.api_client.post(url)

        if res["success"]:
            df = pd.DataFrame(res["details"])
            # Drop internal/bookkeeping columns; errors="ignore" keeps this
            # safe when a column is absent for a given connector type.
            df = df.drop(
                [
                    "_id",
                    "region",
                    "gcp_project_name",
                    "gcp_project_id",
                    "gdrive_file_name",
                    "project_name",
                ],
                axis=1,
                errors="ignore",
            )
            return df

        return res["details"]

    def list_data_connectors_buckets(self, data_connector_name) -> str | List:
        """List the buckets in data connectors

        :param data_connector_name: str
        :return: response details
        """
        if not data_connector_name:
            return "Missing argument data_connector_name"
        if not self.organization_id:
            return "No Organization id found"

        url = build_url(LIST_BUCKETS, data_connector_name, None, self.organization_id)
        res = self.api_client.get(url)

        if res.get("message", None):
            print(res["message"])
        return res["details"]

    def list_data_connectors_filepath(
        self,
        data_connector_name,
        bucket_name: Optional[str] = None,
        root_folder: Optional[str] = None,
    ) -> str | Dict:
        """List the filepaths in data connectors

        :param data_connector_name: str
        :param bucket_name: str | Required for S3 & GCS
        :param root_folder: str | Root folder of SFTP
        :return: response details or an error message string
        """
        if not data_connector_name:
            return "Missing argument data_connector_name"
        if not self.organization_id:
            return "No Organization id found"

        def get_connector() -> str | pd.DataFrame:
            """Retrieve connector metadata for the given link service name."""
            url = build_list_data_connector_url(
                LIST_DATA_CONNECTORS, None, self.organization_id
            )
            res = self.api_client.post(url)

            if res["success"]:
                df = pd.DataFrame(res["details"])
                filtered_df = df.loc[df["link_service_name"] == data_connector_name]
                if filtered_df.empty:
                    return "No data connector found"
                return filtered_df

            return res["details"]

        connectors = get_connector()
        if not isinstance(connectors, pd.DataFrame):
            # Lookup failed; previously execution fell through and raised
            # NameError on the unbound ds_type.  Surface the message instead.
            return connectors

        ds_type = connectors.loc[
            connectors["link_service_name"] == data_connector_name,
            "link_service_type",
        ].values[0]

        if ds_type == "s3" or ds_type == "gcs":
            if not bucket_name:
                return "Missing argument bucket_name"

        if ds_type == "sftp":
            if not root_folder:
                return "Missing argument root_folder"

        url = f"{LIST_FILEPATHS}?organization_id={self.organization_id}&link_service_name={data_connector_name}&bucket_name={bucket_name}&root_folder={root_folder}"
        res = self.api_client.get(url)

        if res.get("message", None):
            print(res["message"])
        return res["details"]

    def credits(self):
        """Return available credit information for the organization."""
        url = build_list_data_connector_url(
            COMPUTE_CREDIT_URI, None, self.organization_id
        )
        res = self.api_client.get(url)
        return res["details"]

    def update_user_access_for_organization(
        self,
        user_email: str,
        access_type: Optional[str] = None,
    ) -> str:
        """Update user access for organization

        :param user_email: Email of user whose access is updated.
        :param access_type: access type to be given to user (admin | user);
            required — omitting it raises ValueError.
        :return: response
        :raises ValueError: when access_type is not "admin" or "user"
        :raises Exception: when the backend reports failure
        """
        # Replaced the former mutable-list default (["admin", "user"]) with
        # None; an omitted argument still fails the check below, preserving
        # the original "must specify" contract.
        if access_type not in ["admin", "user"]:
            raise ValueError("access_type must be either 'admin' or 'user'")
        payload = {
            "organization_user_email": user_email,
            "organization_id": self.organization_id,
            "organization_admin": True if access_type == "admin" else False,
        }
        res = self.api_client.post(UPDATE_ORGANIZATION_URI, payload)

        if not res["success"]:
            raise Exception(res.get("details", "Failed to update user access"))

        return res.get("details", "User access updated successfully")