mlops-python-sdk 0.0.1__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. mlops/__init__.py +3 -3
  2. mlops/api/client/api/storage/__init__.py +1 -0
  3. mlops/api/client/api/storage/get_storage_presign_download.py +175 -0
  4. mlops/api/client/api/storage/get_storage_presign_upload.py +175 -0
  5. mlops/api/client/api/tasks/cancel_task.py +14 -14
  6. mlops/api/client/api/tasks/delete_task.py +14 -14
  7. mlops/api/client/api/tasks/get_task.py +15 -15
  8. mlops/api/client/api/tasks/get_task_by_task_id.py +204 -0
  9. mlops/api/client/api/tasks/get_task_logs.py +300 -0
  10. mlops/api/client/api/tasks/list_tasks.py +14 -14
  11. mlops/api/client/models/__init__.py +22 -0
  12. mlops/api/client/models/get_storage_presign_download_response_200.py +60 -0
  13. mlops/api/client/models/get_storage_presign_upload_response_200.py +79 -0
  14. mlops/api/client/models/get_task_logs_direction.py +9 -0
  15. mlops/api/client/models/get_task_logs_log_type.py +10 -0
  16. mlops/api/client/models/job_spec.py +273 -0
  17. mlops/api/client/models/job_spec_env.py +44 -0
  18. mlops/api/client/models/job_spec_master_strategy.py +8 -0
  19. mlops/api/client/models/log_pagination.py +90 -0
  20. mlops/api/client/models/task_log_entry.py +105 -0
  21. mlops/api/client/models/task_log_entry_log_type.py +9 -0
  22. mlops/api/client/models/task_logs_response.py +112 -0
  23. mlops/api/client/models/task_submit_request.py +24 -6
  24. mlops/connection_config.py +4 -11
  25. mlops/exceptions.py +10 -10
  26. mlops/task/__init__.py +1 -1
  27. mlops/task/client.py +11 -35
  28. mlops/task/task.py +186 -40
  29. {mlops_python_sdk-0.0.1.dist-info → mlops_python_sdk-1.0.1.dist-info}/METADATA +21 -30
  30. mlops_python_sdk-1.0.1.dist-info/RECORD +52 -0
  31. mlops_python_sdk-0.0.1.dist-info/RECORD +0 -36
  32. {mlops_python_sdk-0.0.1.dist-info → mlops_python_sdk-1.0.1.dist-info}/WHEEL +0 -0
mlops/api/client/models/log_pagination.py ADDED
@@ -0,0 +1,90 @@
+ from collections.abc import Mapping
+ from typing import Any, TypeVar, Union, cast
+
+ from attrs import define as _attrs_define
+ from attrs import field as _attrs_field
+
+ from ..types import UNSET, Unset
+
+ T = TypeVar("T", bound="LogPagination")
+
+
+ @_attrs_define
+ class LogPagination:
+     """Pagination information for logs
+
+     Attributes:
+         has_more (Union[Unset, bool]): Whether there are more logs available Example: True.
+         next_cursor (Union[None, Unset, str]): Cursor for the next page (timestamp in nanoseconds) Example:
+             1706612400123456789.
+         total_fetched (Union[Unset, int]): Number of log entries fetched in this request Example: 1000.
+     """
+
+     has_more: Union[Unset, bool] = UNSET
+     next_cursor: Union[None, Unset, str] = UNSET
+     total_fetched: Union[Unset, int] = UNSET
+     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+     def to_dict(self) -> dict[str, Any]:
+         has_more = self.has_more
+
+         next_cursor: Union[None, Unset, str]
+         if isinstance(self.next_cursor, Unset):
+             next_cursor = UNSET
+         else:
+             next_cursor = self.next_cursor
+
+         total_fetched = self.total_fetched
+
+         field_dict: dict[str, Any] = {}
+         field_dict.update(self.additional_properties)
+         field_dict.update({})
+         if has_more is not UNSET:
+             field_dict["has_more"] = has_more
+         if next_cursor is not UNSET:
+             field_dict["next_cursor"] = next_cursor
+         if total_fetched is not UNSET:
+             field_dict["total_fetched"] = total_fetched
+
+         return field_dict
+
+     @classmethod
+     def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+         d = dict(src_dict)
+         has_more = d.pop("has_more", UNSET)
+
+         def _parse_next_cursor(data: object) -> Union[None, Unset, str]:
+             if data is None:
+                 return data
+             if isinstance(data, Unset):
+                 return data
+             return cast(Union[None, Unset, str], data)
+
+         next_cursor = _parse_next_cursor(d.pop("next_cursor", UNSET))
+
+         total_fetched = d.pop("total_fetched", UNSET)
+
+         log_pagination = cls(
+             has_more=has_more,
+             next_cursor=next_cursor,
+             total_fetched=total_fetched,
+         )
+
+         log_pagination.additional_properties = d
+         return log_pagination
+
+     @property
+     def additional_keys(self) -> list[str]:
+         return list(self.additional_properties.keys())
+
+     def __getitem__(self, key: str) -> Any:
+         return self.additional_properties[key]
+
+     def __setitem__(self, key: str, value: Any) -> None:
+         self.additional_properties[key] = value
+
+     def __delitem__(self, key: str) -> None:
+         del self.additional_properties[key]
+
+     def __contains__(self, key: str) -> bool:
+         return key in self.additional_properties
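As a quick orientation for this new model, here is a minimal round-trip sketch (the import path follows the file layout listed above; the payload dict is illustrative, not taken from the API):

```python
from mlops.api.client.models.log_pagination import LogPagination

# Illustrative payload shaped like the documented attributes above.
payload = {
    "has_more": True,
    "next_cursor": "1706612400123456789",  # nanosecond-timestamp cursor
    "total_fetched": 1000,
    "extra_field": "unknown keys are preserved",
}

page = LogPagination.from_dict(payload)
print(page.has_more, page.next_cursor)  # True 1706612400123456789
print(page.additional_keys)             # ['extra_field'] -- kept in additional_properties
print(page.to_dict())                   # serializes back, including extra_field
```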
mlops/api/client/models/task_log_entry.py ADDED
@@ -0,0 +1,105 @@
+ import datetime
+ from collections.abc import Mapping
+ from typing import Any, TypeVar, Union
+
+ from attrs import define as _attrs_define
+ from attrs import field as _attrs_field
+ from dateutil.parser import isoparse
+
+ from ..models.task_log_entry_log_type import TaskLogEntryLogType
+ from ..types import UNSET, Unset
+
+ T = TypeVar("T", bound="TaskLogEntry")
+
+
+ @_attrs_define
+ class TaskLogEntry:
+     """A single log entry
+
+     Attributes:
+         content (Union[Unset, str]): Log line content Example: Training epoch 1/100....
+         log_type (Union[Unset, TaskLogEntryLogType]): Log type (stdout or stderr) Example: stdout.
+         node (Union[Unset, str]): Node hostname where the log was generated Example: compute-node-01.
+         timestamp (Union[Unset, datetime.datetime]): Log timestamp in RFC3339 format Example:
+             2024-01-30T10:30:00.123456789Z.
+     """
+
+     content: Union[Unset, str] = UNSET
+     log_type: Union[Unset, TaskLogEntryLogType] = UNSET
+     node: Union[Unset, str] = UNSET
+     timestamp: Union[Unset, datetime.datetime] = UNSET
+     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+     def to_dict(self) -> dict[str, Any]:
+         content = self.content
+
+         log_type: Union[Unset, str] = UNSET
+         if not isinstance(self.log_type, Unset):
+             log_type = self.log_type.value
+
+         node = self.node
+
+         timestamp: Union[Unset, str] = UNSET
+         if not isinstance(self.timestamp, Unset):
+             timestamp = self.timestamp.isoformat()
+
+         field_dict: dict[str, Any] = {}
+         field_dict.update(self.additional_properties)
+         field_dict.update({})
+         if content is not UNSET:
+             field_dict["content"] = content
+         if log_type is not UNSET:
+             field_dict["log_type"] = log_type
+         if node is not UNSET:
+             field_dict["node"] = node
+         if timestamp is not UNSET:
+             field_dict["timestamp"] = timestamp
+
+         return field_dict
+
+     @classmethod
+     def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+         d = dict(src_dict)
+         content = d.pop("content", UNSET)
+
+         _log_type = d.pop("log_type", UNSET)
+         log_type: Union[Unset, TaskLogEntryLogType]
+         if isinstance(_log_type, Unset):
+             log_type = UNSET
+         else:
+             log_type = TaskLogEntryLogType(_log_type)
+
+         node = d.pop("node", UNSET)
+
+         _timestamp = d.pop("timestamp", UNSET)
+         timestamp: Union[Unset, datetime.datetime]
+         if isinstance(_timestamp, Unset):
+             timestamp = UNSET
+         else:
+             timestamp = isoparse(_timestamp)
+
+         task_log_entry = cls(
+             content=content,
+             log_type=log_type,
+             node=node,
+             timestamp=timestamp,
+         )
+
+         task_log_entry.additional_properties = d
+         return task_log_entry
+
+     @property
+     def additional_keys(self) -> list[str]:
+         return list(self.additional_properties.keys())
+
+     def __getitem__(self, key: str) -> Any:
+         return self.additional_properties[key]
+
+     def __setitem__(self, key: str, value: Any) -> None:
+         self.additional_properties[key] = value
+
+     def __delitem__(self, key: str) -> None:
+         del self.additional_properties[key]
+
+     def __contains__(self, key: str) -> bool:
+         return key in self.additional_properties
mlops/api/client/models/task_log_entry_log_type.py ADDED
@@ -0,0 +1,9 @@
+ from enum import Enum
+
+
+ class TaskLogEntryLogType(str, Enum):
+     STDERR = "stderr"
+     STDOUT = "stdout"
+
+     def __str__(self) -> str:
+         return str(self.value)
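TaskLogEntry pairs with the TaskLogEntryLogType enum above: from_dict coerces the raw log_type string into the enum and parses the RFC3339 timestamp with isoparse. A small sketch with illustrative values:

```python
from mlops.api.client.models.task_log_entry import TaskLogEntry
from mlops.api.client.models.task_log_entry_log_type import TaskLogEntryLogType

entry = TaskLogEntry.from_dict({
    "content": "Training epoch 1/100...",
    "log_type": "stdout",
    "node": "compute-node-01",
    "timestamp": "2024-01-30T10:30:00.123456Z",
})

assert entry.log_type is TaskLogEntryLogType.STDOUT  # string coerced to enum member
print(entry.timestamp.tzinfo)                        # parsed into a timezone-aware datetime
print(str(entry.log_type))                           # "stdout" -- __str__ returns the value
```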
mlops/api/client/models/task_logs_response.py ADDED
@@ -0,0 +1,112 @@
+ from collections.abc import Mapping
+ from typing import TYPE_CHECKING, Any, TypeVar, Union
+
+ from attrs import define as _attrs_define
+ from attrs import field as _attrs_field
+
+ from ..types import UNSET, Unset
+
+ if TYPE_CHECKING:
+     from ..models.log_pagination import LogPagination
+     from ..models.task_log_entry import TaskLogEntry
+
+
+ T = TypeVar("T", bound="TaskLogsResponse")
+
+
+ @_attrs_define
+ class TaskLogsResponse:
+     """Task logs response with pagination support
+
+     Attributes:
+         cluster_id (Union[Unset, int]): Cluster ID Example: 1.
+         job_id (Union[Unset, int]): Slurm Job ID Example: 12345.
+         logs (Union[Unset, list['TaskLogEntry']]): List of log entries
+         pagination (Union[Unset, LogPagination]): Pagination information for logs
+     """
+
+     cluster_id: Union[Unset, int] = UNSET
+     job_id: Union[Unset, int] = UNSET
+     logs: Union[Unset, list["TaskLogEntry"]] = UNSET
+     pagination: Union[Unset, "LogPagination"] = UNSET
+     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+     def to_dict(self) -> dict[str, Any]:
+         cluster_id = self.cluster_id
+
+         job_id = self.job_id
+
+         logs: Union[Unset, list[dict[str, Any]]] = UNSET
+         if not isinstance(self.logs, Unset):
+             logs = []
+             for logs_item_data in self.logs:
+                 logs_item = logs_item_data.to_dict()
+                 logs.append(logs_item)
+
+         pagination: Union[Unset, dict[str, Any]] = UNSET
+         if not isinstance(self.pagination, Unset):
+             pagination = self.pagination.to_dict()
+
+         field_dict: dict[str, Any] = {}
+         field_dict.update(self.additional_properties)
+         field_dict.update({})
+         if cluster_id is not UNSET:
+             field_dict["cluster_id"] = cluster_id
+         if job_id is not UNSET:
+             field_dict["job_id"] = job_id
+         if logs is not UNSET:
+             field_dict["logs"] = logs
+         if pagination is not UNSET:
+             field_dict["pagination"] = pagination
+
+         return field_dict
+
+     @classmethod
+     def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+         from ..models.log_pagination import LogPagination
+         from ..models.task_log_entry import TaskLogEntry
+
+         d = dict(src_dict)
+         cluster_id = d.pop("cluster_id", UNSET)
+
+         job_id = d.pop("job_id", UNSET)
+
+         logs = []
+         _logs = d.pop("logs", UNSET)
+         for logs_item_data in _logs or []:
+             logs_item = TaskLogEntry.from_dict(logs_item_data)
+
+             logs.append(logs_item)
+
+         _pagination = d.pop("pagination", UNSET)
+         pagination: Union[Unset, LogPagination]
+         if isinstance(_pagination, Unset):
+             pagination = UNSET
+         else:
+             pagination = LogPagination.from_dict(_pagination)
+
+         task_logs_response = cls(
+             cluster_id=cluster_id,
+             job_id=job_id,
+             logs=logs,
+             pagination=pagination,
+         )
+
+         task_logs_response.additional_properties = d
+         return task_logs_response
+
+     @property
+     def additional_keys(self) -> list[str]:
+         return list(self.additional_properties.keys())
+
+     def __getitem__(self, key: str) -> Any:
+         return self.additional_properties[key]
+
+     def __setitem__(self, key: str, value: Any) -> None:
+         self.additional_properties[key] = value
+
+     def __delitem__(self, key: str) -> None:
+         del self.additional_properties[key]
+
+     def __contains__(self, key: str) -> bool:
+         return key in self.additional_properties
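The three new models compose into the paginated logs payload. A sketch of parsing one (the dict mirrors the documented schema; how it is fetched via get_task_logs is outside this hunk):

```python
from mlops.api.client.models.task_logs_response import TaskLogsResponse

response = TaskLogsResponse.from_dict({
    "cluster_id": 1,
    "job_id": 12345,
    "logs": [
        {"content": "Training epoch 1/100...", "log_type": "stdout", "node": "compute-node-01"},
        {"content": "deprecation warning", "log_type": "stderr", "node": "compute-node-01"},
    ],
    "pagination": {"has_more": True, "next_cursor": "1706612400123456789", "total_fetched": 2},
})

for entry in response.logs:
    print(f"[{entry.node}][{entry.log_type}] {entry.content}")

if response.pagination.has_more:
    # Feed next_cursor back as the cursor for the following page.
    print("next cursor:", response.pagination.next_cursor)
```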
mlops/api/client/models/task_submit_request.py CHANGED
@@ -7,6 +7,7 @@ from attrs import field as _attrs_field
  from ..types import UNSET, Unset

  if TYPE_CHECKING:
+     from ..models.job_spec import JobSpec
      from ..models.task_submit_request_environment_type_0 import TaskSubmitRequestEnvironmentType0


@@ -18,7 +19,7 @@ class TaskSubmitRequest:
      """Task submission request

      Attributes:
-         cluster_id (int): Slurm cluster ID to submit task to Example: 1.
+         cluster_name (str): Slurm cluster name to submit task to Example: slurm-prod.
          name (str): Task name Example: training-job.
          account (Union[None, Unset, str]): Account Example: research.
          command (Union[None, Unset, str]): Command to execute (alternative to script) Example: python train.py.
@@ -35,6 +36,7 @@ class TaskSubmitRequest:
          export (Union[None, Unset, str]): Environment export Example: ALL.
          gres (Union[None, Unset, str]): Generic resources (e.g., "gpu:1", "gpu:tesla:2") Example: gpu:1.
          input_ (Union[None, Unset, str]): Standard input file
+         job_spec (Union[Unset, JobSpec]): Domain-specific job specification (rendered into slurm script)
          mem_bind (Union[None, Unset, str]): Memory binding
          memory (Union[None, Unset, str]): Memory requirement (e.g., "8G", "4096M") Example: 8G.
          nice (Union[None, Unset, int]): Nice value
@@ -53,7 +55,7 @@ class TaskSubmitRequest:
          tres (Union[None, Unset, str]): Trackable resources string Example: cpu=4,mem=8G.
      """

-     cluster_id: int
+     cluster_name: str
      name: str
      account: Union[None, Unset, str] = UNSET
      command: Union[None, Unset, str] = UNSET
@@ -69,6 +71,7 @@ class TaskSubmitRequest:
      export: Union[None, Unset, str] = UNSET
      gres: Union[None, Unset, str] = UNSET
      input_: Union[None, Unset, str] = UNSET
+     job_spec: Union[Unset, "JobSpec"] = UNSET
      mem_bind: Union[None, Unset, str] = UNSET
      memory: Union[None, Unset, str] = UNSET
      nice: Union[None, Unset, int] = UNSET
@@ -88,7 +91,7 @@ class TaskSubmitRequest:
      def to_dict(self) -> dict[str, Any]:
          from ..models.task_submit_request_environment_type_0 import TaskSubmitRequestEnvironmentType0

-         cluster_id = self.cluster_id
+         cluster_name = self.cluster_name

          name = self.name

@@ -178,6 +181,10 @@ class TaskSubmitRequest:
          else:
              input_ = self.input_

+         job_spec: Union[Unset, dict[str, Any]] = UNSET
+         if not isinstance(self.job_spec, Unset):
+             job_spec = self.job_spec.to_dict()
+
          mem_bind: Union[None, Unset, str]
          if isinstance(self.mem_bind, Unset):
              mem_bind = UNSET
@@ -262,7 +269,7 @@ class TaskSubmitRequest:
          field_dict.update(self.additional_properties)
          field_dict.update(
              {
-                 "cluster_id": cluster_id,
+                 "cluster_name": cluster_name,
                  "name": name,
              }
          )
@@ -294,6 +301,8 @@ class TaskSubmitRequest:
              field_dict["gres"] = gres
          if input_ is not UNSET:
              field_dict["input"] = input_
+         if job_spec is not UNSET:
+             field_dict["job_spec"] = job_spec
          if mem_bind is not UNSET:
              field_dict["mem_bind"] = mem_bind
          if memory is not UNSET:
@@ -327,10 +336,11 @@

      @classmethod
      def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+         from ..models.job_spec import JobSpec
          from ..models.task_submit_request_environment_type_0 import TaskSubmitRequestEnvironmentType0

          d = dict(src_dict)
-         cluster_id = d.pop("cluster_id")
+         cluster_name = d.pop("cluster_name")

          name = d.pop("name")


@@ -468,6 +478,13 @@ class TaskSubmitRequest:

          input_ = _parse_input_(d.pop("input", UNSET))

+         _job_spec = d.pop("job_spec", UNSET)
+         job_spec: Union[Unset, JobSpec]
+         if isinstance(_job_spec, Unset):
+             job_spec = UNSET
+         else:
+             job_spec = JobSpec.from_dict(_job_spec)
+
          def _parse_mem_bind(data: object) -> Union[None, Unset, str]:
              if data is None:
                  return data
@@ -588,7 +605,7 @@ class TaskSubmitRequest:
          tres = _parse_tres(d.pop("tres", UNSET))

          task_submit_request = cls(
-             cluster_id=cluster_id,
+             cluster_name=cluster_name,
              name=name,
              account=account,
              command=command,
@@ -604,6 +621,7 @@ class TaskSubmitRequest:
              export=export,
              gres=gres,
              input_=input_,
+             job_spec=job_spec,
              mem_bind=mem_bind,
              memory=memory,
              nice=nice,
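The practical effect of these hunks is a breaking change: the required cluster_id (int) field becomes cluster_name (str), and an optional job_spec is added (JobSpec itself lives in the new job_spec.py, which is not expanded here). A construction sketch using only fields visible above:

```python
from mlops.api.client.models.task_submit_request import TaskSubmitRequest

# 0.0.1 required cluster_id=1 (int); 1.0.1 requires cluster_name instead.
req = TaskSubmitRequest(
    cluster_name="slurm-prod",
    name="training-job",
    command="python train.py",
    gres="gpu:1",
    memory="8G",
)

body = req.to_dict()
assert "cluster_name" in body and "cluster_id" not in body
```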
mlops/connection_config.py CHANGED
@@ -19,30 +19,25 @@ class ConnectionConfig:

      @staticmethod
      def _domain():
-         return os.getenv("XCLIENT_DOMAIN", "localhost:8090")
+         return os.getenv('MLOPS_DOMAIN', "localhost:8090")

      @staticmethod
      def _debug():
-         return os.getenv("XCLIENT_DEBUG", "false").lower() == "true"
+         return os.getenv('MLOPS_DEBUG', "false").lower() == "true"

      @staticmethod
      def _api_key():
-         return os.getenv("XCLIENT_API_KEY")
-
-     @staticmethod
-     def _access_token():
-         return os.getenv("XCLIENT_ACCESS_TOKEN")
+         return os.getenv('MLOPS_API_KEY')

      @staticmethod
      def _api_path():
-         return os.getenv("XCLIENT_API_PATH", DEFAULT_API_PATH)
+         return os.getenv('MLOPS_API_PATH', DEFAULT_API_PATH)

      def __init__(
          self,
          domain: Optional[str] = None,
          debug: Optional[bool] = None,
          api_key: Optional[str] = None,
-         access_token: Optional[str] = None,
          request_timeout: Optional[float] = None,
          headers: Optional[Dict[str, str]] = None,
          proxy: Optional[ProxyTypes] = None,
@@ -51,11 +46,9 @@ class ConnectionConfig:
          self.domain = domain or ConnectionConfig._domain()
          self.debug = debug or ConnectionConfig._debug()
          self.api_key = api_key or ConnectionConfig._api_key()
-         self.access_token = access_token or ConnectionConfig._access_token()
          self.headers = headers or {}
          self.proxy = proxy
          self.api_path = api_path or ConnectionConfig._api_path()
-
          self.request_timeout = ConnectionConfig._get_request_timeout(
              REQUEST_TIMEOUT,
              request_timeout,
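All configuration environment variables are renamed from the XCLIENT_* prefix to MLOPS_*, and the access_token parameter (along with XCLIENT_ACCESS_TOKEN) is removed. A configuration sketch with a hypothetical host:

```python
import os

# 1.0.1 reads MLOPS_* variables; the old XCLIENT_* names are no longer consulted.
os.environ["MLOPS_DOMAIN"] = "mlops.example.com:8090"  # hypothetical endpoint
os.environ["MLOPS_API_KEY"] = "mlops_..."              # placeholder key
# Also available: MLOPS_API_PATH, MLOPS_DEBUG

from mlops.connection_config import ConnectionConfig

config = ConnectionConfig()  # picks the MLOPS_* values up
# Or pass values explicitly -- note there is no access_token parameter anymore:
config = ConnectionConfig(domain="mlops.example.com:8090", api_key="mlops_...")
```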
mlops/exceptions.py CHANGED
@@ -10,17 +10,17 @@ def format_execution_timeout_error() -> Exception:
      )


- class XClientException(Exception):
+ class MLOpsException(Exception):
      """
-     Base class for all XClient errors.
+     Base class for all MLOps errors.

-     Raised when a general XClient exception occurs.
+     Raised when a general MLOps exception occurs.
      """

      pass


- class TimeoutException(XClientException):
+ class TimeoutException(MLOpsException):
      """
      Raised when a timeout occurs.

@@ -33,7 +33,7 @@ class TimeoutException(XClientException):
      pass


- class InvalidArgumentException(XClientException):
+ class InvalidArgumentException(MLOpsException):
      """
      Raised when an invalid argument is provided.
      """
@@ -41,7 +41,7 @@ class InvalidArgumentException(XClientException):
      pass


- class NotEnoughSpaceException(XClientException):
+ class NotEnoughSpaceException(MLOpsException):
      """
      Raised when there is not enough disk space.
      """
@@ -49,7 +49,7 @@ class NotEnoughSpaceException(XClientException):
      pass


- class NotFoundException(XClientException):
+ class NotFoundException(MLOpsException):
      """
      Raised when a resource is not found.
      """
@@ -57,7 +57,7 @@ class NotFoundException(XClientException):
      pass


- class AuthenticationException(XClientException):
+ class AuthenticationException(MLOpsException):
      """
      Raised when authentication fails.
      """
@@ -65,7 +65,7 @@ class AuthenticationException(XClientException):
      pass


- class RateLimitException(XClientException):
+ class RateLimitException(MLOpsException):
      """
      Raised when the API rate limit is exceeded.
      """
@@ -73,7 +73,7 @@ class RateLimitException(XClientException):
      pass


- class APIException(XClientException):
+ class APIException(MLOpsException):
      """
      Raised when an API error occurs.
      """
mlops/task/__init__.py CHANGED
@@ -1,4 +1,4 @@
- """Task SDK module for XClient"""
+ """Task SDK module for MLOps"""

  from .client import TaskClient
  from .task import Task
mlops/task/client.py CHANGED
@@ -17,7 +17,7 @@ logger = logging.getLogger(__name__)


  def handle_api_exception(e: Response):
-     """Handle API exceptions and convert them to appropriate XClient exceptions."""
+     """Handle API exceptions and convert them to appropriate MLOps exceptions."""
      try:
          body = json.loads(e.content) if e.content else {}
      except json.JSONDecodeError:
@@ -49,51 +49,27 @@

  class TaskClient(AuthenticatedClient):
      """
-     The client for interacting with the XClient Task API.
+     The client for interacting with the MLOps Task API.
      """

      def __init__(
          self,
          config: ConnectionConfig,
-         require_api_key: bool = True,
-         require_access_token: bool = False,
          limits: Optional[Limits] = None,
          *args,
          **kwargs,
      ):
-         if require_api_key and require_access_token:
+         # NOTE: This SDK client only supports API key authentication.
+         # Header: X-API-Key (per OpenAPI spec)
+         if config.api_key is None:
              raise AuthenticationException(
-                 "Only one of api_key or access_token can be required, not both",
+                 "API key is required. "
+                 "You can set the environment variable `MLOPS_API_KEY`"
+                 ' to pass it directly like ConnectionConfig(api_key="mlops_...")',
              )

-         if not require_api_key and not require_access_token:
-             raise AuthenticationException(
-                 "Either api_key or access_token is required",
-             )
-
-         token = None
-         if require_api_key:
-             if config.api_key is None:
-                 raise AuthenticationException(
-                     "API key is required. "
-                     "You can either set the environment variable `XCLIENT_API_KEY` "
-                     'or pass it directly like TaskClient(api_key="xclient_...")',
-                 )
-             token = config.api_key
-
-         if require_access_token:
-             if config.access_token is None:
-                 raise AuthenticationException(
-                     "Access token is required. "
-                     "You can set the environment variable `XCLIENT_ACCESS_TOKEN` "
-                     "or pass the `access_token` in options.",
-                 )
-             token = config.access_token
-
-         # API Key header: X-API-Key (per OpenAPI spec)
-         # JWT header: Authorization: Bearer <token>
-         auth_header_name = "X-API-Key" if require_api_key else "Authorization"
-         prefix = "" if require_api_key else "Bearer"
+         auth_header_name = "X-API-Key"
+         prefix = ""

          headers = {
              **(config.headers or {}),
@@ -116,9 +92,9 @@ class TaskClient(AuthenticatedClient):
              base_url=config.api_url,
              httpx_args=httpx_args,
              headers=headers,
-             token=token,
              auth_header_name=auth_header_name,
              prefix=prefix,
+             token=config.api_key,
              *args,
              **kwargs,
          )
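With the dual-auth branches gone, client construction has a single path: an API key sent in the X-API-Key header. A usage sketch assuming only what this hunk and the task/__init__.py re-export show:

```python
from mlops.connection_config import ConnectionConfig
from mlops.task import TaskClient

config = ConnectionConfig(api_key="mlops_...")  # or export MLOPS_API_KEY
client = TaskClient(config)  # raises AuthenticationException if api_key is missing

# The former require_api_key / require_access_token flags and Bearer-token
# mode no longer exist; every request carries the key as X-API-Key.
```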