superb-ai-onprem 0.5.9__py3-none-any.whl → 0.5.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of superb-ai-onprem might be problematic. Click here for more details.

Files changed (57)
  1. spb_onprem/__init__.py +8 -0
  2. spb_onprem/_version.py +2 -2
  3. spb_onprem/activities/service.py +1 -2
  4. spb_onprem/contents/params/__init__.py +2 -0
  5. spb_onprem/contents/params/delete_content.py +10 -0
  6. spb_onprem/contents/queries.py +11 -0
  7. spb_onprem/contents/service.py +19 -0
  8. spb_onprem/data/entities/data.py +0 -1
  9. spb_onprem/data/params/__init__.py +8 -0
  10. spb_onprem/data/params/create_data.py +2 -10
  11. spb_onprem/data/params/get_data_detail.py +14 -0
  12. spb_onprem/data/params/get_evaluation_value_list.py +36 -0
  13. spb_onprem/data/params/update_data.py +0 -16
  14. spb_onprem/data/params/upsert_data_meta.py +0 -12
  15. spb_onprem/data/queries.py +72 -6
  16. spb_onprem/data/service.py +103 -108
  17. spb_onprem/datasets/params/__init__.py +2 -0
  18. spb_onprem/datasets/params/delete_dataset.py +12 -0
  19. spb_onprem/datasets/queries.py +11 -0
  20. spb_onprem/datasets/service.py +18 -0
  21. spb_onprem/entities.py +4 -0
  22. spb_onprem/inferences/__init__.py +5 -0
  23. spb_onprem/inferences/service.py +56 -0
  24. spb_onprem/models/__init__.py +7 -0
  25. spb_onprem/models/entities.py +9 -0
  26. spb_onprem/models/params/__init__.py +7 -0
  27. spb_onprem/models/params/delete_model.py +14 -0
  28. spb_onprem/models/params/get_models.py +29 -0
  29. spb_onprem/models/queries.py +33 -0
  30. spb_onprem/models/service.py +76 -0
  31. spb_onprem/predictions/__init__.py +7 -0
  32. spb_onprem/predictions/entities.py +11 -0
  33. spb_onprem/predictions/params/__init__.py +15 -0
  34. spb_onprem/predictions/params/create_prediction_set.py +44 -0
  35. spb_onprem/predictions/params/delete_prediction_from_data.py +20 -0
  36. spb_onprem/predictions/params/delete_prediction_set.py +14 -0
  37. spb_onprem/predictions/params/get_prediction_set.py +14 -0
  38. spb_onprem/predictions/params/get_prediction_sets.py +29 -0
  39. spb_onprem/predictions/params/update_prediction_set_data_info.py +28 -0
  40. spb_onprem/predictions/queries.py +110 -0
  41. spb_onprem/predictions/service.py +225 -0
  42. {superb_ai_onprem-0.5.9.dist-info → superb_ai_onprem-0.5.11.dist-info}/METADATA +1 -1
  43. {superb_ai_onprem-0.5.9.dist-info → superb_ai_onprem-0.5.11.dist-info}/RECORD +57 -25
  44. tests/activities/test_params.py +2 -2
  45. tests/activities/test_service.py +28 -38
  46. tests/data/__init__.py +0 -0
  47. tests/data/test_data_service.py +412 -0
  48. tests/datasets/__init__.py +1 -0
  49. tests/datasets/test_dataset_service.py +135 -0
  50. tests/exports/test_service.py +1 -0
  51. tests/models/__init__.py +1 -0
  52. tests/models/test_model_service.py +249 -0
  53. tests/predictions/__init__.py +1 -0
  54. tests/predictions/test_prediction_service.py +359 -0
  55. {superb_ai_onprem-0.5.9.dist-info → superb_ai_onprem-0.5.11.dist-info}/WHEEL +0 -0
  56. {superb_ai_onprem-0.5.9.dist-info → superb_ai_onprem-0.5.11.dist-info}/licenses/LICENSE +0 -0
  57. {superb_ai_onprem-0.5.9.dist-info → superb_ai_onprem-0.5.11.dist-info}/top_level.txt +0 -0
spb_onprem/__init__.py CHANGED
@@ -10,6 +10,9 @@ from .slices.service import SliceService
10
10
  from .activities.service import ActivityService
11
11
  from .exports.service import ExportService
12
12
  from .contents.service import ContentService
13
+ from .predictions.service import PredictionService
14
+ from .models.service import ModelService
15
+ from .inferences.service import InferService
13
16
 
14
17
  # Core Entities and Enums
15
18
  from .entities import (
@@ -77,6 +80,9 @@ __all__ = (
77
80
  "ActivityService",
78
81
  "ExportService",
79
82
  "ContentService",
83
+ "PredictionService",
84
+ "ModelService",
85
+ "InferService",
80
86
 
81
87
  # Core Entities
82
88
  "Data",
@@ -93,6 +99,8 @@ __all__ = (
93
99
  "Export",
94
100
  "Content",
95
101
  "Frame",
102
+ "PredictionSet",
103
+ "Model",
96
104
 
97
105
  # Enums
98
106
  "DataType",
spb_onprem/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
28
28
  commit_id: COMMIT_ID
29
29
  __commit_id__: COMMIT_ID
30
30
 
31
- __version__ = version = '0.5.9'
32
- __version_tuple__ = version_tuple = (0, 5, 9)
31
+ __version__ = version = '0.5.11'
32
+ __version_tuple__ = version_tuple = (0, 5, 11)
33
33
 
34
34
  __commit_id__ = commit_id = None
@@ -65,8 +65,7 @@ class ActivityService(BaseService):
65
65
  meta=meta,
66
66
  )
67
67
  )
68
- activity_dict = response
69
- return Activity.model_validate(activity_dict)
68
+ return Activity.model_validate(response)
70
69
 
71
70
  def get_activities(
72
71
  self,
@@ -1,8 +1,10 @@
1
1
 
2
2
  from .create import create_variables
3
3
  from .get_download_url import get_download_url_params
4
+ from .delete_content import delete_content_params
4
5
 
5
6
  __all__ = (
6
7
  "create_variables",
7
8
  "get_download_url_params",
9
+ "delete_content_params",
8
10
  )
@@ -0,0 +1,10 @@
1
+ def delete_content_params(content_id: str):
2
+ """Generate variables for delete content GraphQL mutation.
3
+
4
+ Args:
5
+ content_id (str): The ID of the content to delete.
6
+
7
+ Returns:
8
+ dict: Variables dictionary for the GraphQL query.
9
+ """
10
+ return {"id": content_id}
@@ -1,6 +1,7 @@
1
1
  from .params import (
2
2
  create_variables,
3
3
  get_download_url_params,
4
+ delete_content_params,
4
5
  )
5
6
 
6
7
  class Queries:
@@ -33,3 +34,13 @@ class Queries:
33
34
  ''',
34
35
  "variables": get_download_url_params
35
36
  }
37
+
38
+ DELETE = {
39
+ "name": "deleteContent",
40
+ "query": '''
41
+ mutation DeleteContent($id: ID!) {
42
+ deleteContent(id: $id)
43
+ }
44
+ ''',
45
+ "variables": delete_content_params
46
+ }
@@ -176,3 +176,22 @@ class ContentService(BaseService):
176
176
  variables=Queries.GET_DOWNLOAD_URL["variables"](content_id)
177
177
  )
178
178
  return response
179
+
180
+ def delete_content(
181
+ self,
182
+ content_id: str,
183
+ ) -> bool:
184
+ '''
185
+ Delete a content by ID.
186
+
187
+ Args:
188
+ content_id (str): The ID of the content to delete.
189
+
190
+ Returns:
191
+ bool: True if deletion was successful.
192
+ '''
193
+ response = self.request_gql(
194
+ query=Queries.DELETE,
195
+ variables=Queries.DELETE["variables"](content_id)
196
+ )
197
+ return response.get("deleteContent", False)
@@ -24,7 +24,6 @@ class Data(CustomBaseModel):
24
24
  annotation: Optional[Annotation] = None
25
25
  predictions: Optional[List[Prediction]] = None
26
26
  meta: Optional[List[DataMeta]] = None
27
- system_meta: Optional[List[DataMeta]] = Field(None, alias="systemMeta")
28
27
  created_at: Optional[str] = Field(None, alias="createdAt")
29
28
  created_by: Optional[str] = Field(None, alias="createdBy")
30
29
  updated_at: Optional[str] = Field(None, alias="updatedAt")
@@ -15,6 +15,12 @@ from .data_list import (
15
15
  DataFilterOptions,
16
16
  DataListFilter,
17
17
  )
18
+ from .get_data_detail import (
19
+ get_data_detail_params,
20
+ )
21
+ from .get_evaluation_value_list import (
22
+ get_evaluation_value_list_params,
23
+ )
18
24
  from .remove_data_from_slice import (
19
25
  remove_data_from_slice_params
20
26
  )
@@ -73,6 +79,8 @@ __all__ = [
73
79
  "get_params",
74
80
  "get_data_id_list_params",
75
81
  "get_data_list_params",
82
+ "get_data_detail_params",
83
+ "get_evaluation_value_list_params",
76
84
  "AnnotationFilter",
77
85
  "AnnotationRangeFilter",
78
86
  "DataFilterOptions",
@@ -36,7 +36,7 @@ def create_params(
36
36
  "meta": version.meta,
37
37
  }
38
38
  for version in data.annotation.versions
39
- ],
39
+ ] if data.annotation.versions is not None else [],
40
40
  "meta": data.annotation.meta
41
41
  } if data.annotation is not None else None,
42
42
  "predictions": [
@@ -56,13 +56,5 @@ def create_params(
56
56
  "value": meta.value,
57
57
  }
58
58
  for meta in data.meta
59
- ] if data.meta is not None else None,
60
- "systemMeta": [
61
- {
62
- "key": meta.key,
63
- "type": meta.type.value,
64
- "value": meta.value,
65
- }
66
- for meta in data.system_meta
67
- ] if data.system_meta is not None else None,
59
+ ] if data.meta is not None else [],
68
60
  }
@@ -0,0 +1,14 @@
1
+ def get_data_detail_params(dataset_id: str, data_id: str):
2
+ """Generate variables for get data detail GraphQL query.
3
+
4
+ Args:
5
+ dataset_id (str): The ID of the dataset.
6
+ data_id (str): The ID of the data.
7
+
8
+ Returns:
9
+ dict: Variables dictionary for the GraphQL query.
10
+ """
11
+ return {
12
+ "datasetId": dataset_id,
13
+ "id": data_id
14
+ }
@@ -0,0 +1,36 @@
1
+ from typing import Union
2
+ from spb_onprem.base_types import UndefinedType, Undefined
3
+
4
+
5
+ def get_evaluation_value_list_params(
6
+ dataset_id: str,
7
+ prediction_set_id: str,
8
+ filter: Union[UndefinedType, dict] = Undefined,
9
+ length: int = 50,
10
+ cursor: Union[UndefinedType, str] = Undefined
11
+ ):
12
+ """Generate variables for get evaluation value list GraphQL query.
13
+
14
+ Args:
15
+ dataset_id (str): The ID of the dataset.
16
+ prediction_set_id (str): The ID of the prediction set.
17
+ filter (Union[UndefinedType, dict], optional): Diagnosis filter for evaluation values.
18
+ length (int): Number of items to retrieve per page.
19
+ cursor (Union[UndefinedType, str], optional): Cursor for pagination.
20
+
21
+ Returns:
22
+ dict: Variables dictionary for the GraphQL query.
23
+ """
24
+ params = {
25
+ "datasetId": dataset_id,
26
+ "predictionSetId": prediction_set_id,
27
+ "length": length
28
+ }
29
+
30
+ if filter is not Undefined:
31
+ params["filter"] = filter
32
+
33
+ if cursor is not Undefined:
34
+ params["cursor"] = cursor
35
+
36
+ return params
@@ -23,10 +23,6 @@ def update_params(
23
23
  Optional[List[DataMeta]],
24
24
  UndefinedType
25
25
  ] = Undefined,
26
- system_meta: Union[
27
- Optional[List[DataMeta]],
28
- UndefinedType
29
- ] = Undefined,
30
26
  ):
31
27
  """Make the variables for the updateData query.
32
28
 
@@ -56,17 +52,5 @@ def update_params(
56
52
  }
57
53
  for meta in meta
58
54
  ] if meta is not None else None
59
-
60
- if system_meta is not Undefined:
61
- if system_meta is not None and not isinstance(system_meta, list):
62
- raise ValueError("meta must be a list of DataMeta or None.")
63
- variables["systemMeta"] = [
64
- {
65
- "key": meta.key,
66
- "type": meta.type.value,
67
- "value": meta.value,
68
- }
69
- for meta in system_meta
70
- ] if system_meta is not None else None
71
55
 
72
56
  return variables
@@ -11,10 +11,6 @@ def upsert_data_meta_params(
11
11
  Optional[List[DataMeta]],
12
12
  UndefinedType
13
13
  ] = Undefined,
14
- system_meta: Union[
15
- Optional[List[DataMeta]],
16
- UndefinedType
17
- ] = Undefined,
18
14
  ):
19
15
  """Make the variables for the upsertDataMeta query.
20
16
 
@@ -37,12 +33,4 @@ def upsert_data_meta_params(
37
33
  for item in meta
38
34
  ] if meta is not None else None
39
35
 
40
- if system_meta is not Undefined:
41
- if system_meta is not None and not isinstance(system_meta, list):
42
- raise ValueError("system_meta must be a list of DataMeta or None.")
43
- variables["system_meta"] = [
44
- item.model_dump(by_alias=True, exclude_unset=True)
45
- for item in system_meta
46
- ] if system_meta is not None else None
47
-
48
36
  return variables
@@ -4,6 +4,8 @@ from .params import (
4
4
  get_params,
5
5
  get_data_id_list_params,
6
6
  get_data_list_params,
7
+ get_data_detail_params,
8
+ get_evaluation_value_list_params,
7
9
  remove_data_from_slice_params,
8
10
  insert_data_to_slice_params,
9
11
  delete_data_params,
@@ -106,6 +108,34 @@ class Schemas:
106
108
  {DATA}
107
109
  }}
108
110
  '''
111
+
112
+ DATA_DETAIL = '''
113
+ id
114
+ datasetId
115
+ scene {
116
+ id
117
+ content {
118
+ id
119
+ }
120
+ }
121
+ annotation {
122
+ versions {
123
+ id
124
+ content {
125
+ id
126
+ }
127
+ }
128
+ }
129
+ predictions {
130
+ id
131
+ content {
132
+ id
133
+ }
134
+ }
135
+ thumbnail {
136
+ id
137
+ }
138
+ '''
109
139
 
110
140
 
111
141
  class Queries():
@@ -122,7 +152,6 @@ class Queries():
122
152
  $annotation: AnnotationInput,
123
153
  $predictions: [PredictionInput!],
124
154
  $meta: [DataMetaInput!]
125
- $systemMeta: [DataMetaInput!]
126
155
  ) {{
127
156
  createData(
128
157
  datasetId: $datasetId,
@@ -134,7 +163,6 @@ class Queries():
134
163
  annotation: $annotation,
135
164
  predictions: $predictions,
136
165
  meta: $meta,
137
- systemMeta: $systemMeta
138
166
  )
139
167
  {{
140
168
  {Schemas.DATA}
@@ -151,15 +179,13 @@ class Queries():
151
179
  $dataset_id: ID!,
152
180
  $data_id: ID!,
153
181
  $key: String,
154
- $meta: [DataMetaInput!],
155
- $systemMeta: [DataMetaInput!]
182
+ $meta: [DataMetaInput!]
156
183
  ) {{
157
184
  updateData(
158
185
  datasetId: $dataset_id,
159
186
  id: $data_id,
160
187
  key: $key,
161
- meta: $meta,
162
- systemMeta: $systemMeta
188
+ meta: $meta
163
189
  )
164
190
  {{
165
191
  {Schemas.DATA}
@@ -594,3 +620,43 @@ class Queries():
594
620
  ''',
595
621
  "variables": update_frames_params,
596
622
  }
623
+
624
+ GET_DETAIL = {
625
+ "name": "getDataDetail",
626
+ "query": f'''
627
+ query GetDataDetail($datasetId: String!, $id: String!) {{
628
+ data(datasetId: $datasetId, id: $id) {{
629
+ {Schemas.DATA_DETAIL}
630
+ }}
631
+ }}
632
+ ''',
633
+ "variables": get_data_detail_params
634
+ }
635
+
636
+ GET_EVALUATION_VALUE_LIST = {
637
+ "name": "getEvaluationValueList",
638
+ "query": '''
639
+ query GetEvaluationValueList(
640
+ $datasetId: String!,
641
+ $predictionSetId: String!,
642
+ $filter: DiagnosisFilter,
643
+ $length: Int,
644
+ $cursor: String
645
+ ) {
646
+ evaluationValueList(
647
+ datasetId: $datasetId,
648
+ predictionSetId: $predictionSetId,
649
+ filter: $filter,
650
+ length: $length,
651
+ cursor: $cursor
652
+ ) {
653
+ totalCount
654
+ next
655
+ data {
656
+ dataId
657
+ }
658
+ }
659
+ }
660
+ ''',
661
+ "variables": get_evaluation_value_list_params
662
+ }
@@ -1,19 +1,13 @@
1
1
  """
2
- base_service.py
3
-
4
- This module defines the BaseService class, which serves as an abstract base class for services that handle data operations.
2
+ This module defines the DataService class for handling data-related operations.
5
3
 
6
4
  Classes:
7
- BaseService: An abstract base class that requires the implementation of the create_data method.
5
+ DataService: A service class that provides methods for data management operations.
8
6
  """
9
- from io import BytesIO
10
7
  from typing import (
11
8
  Optional, List, Union,
12
9
  )
13
10
 
14
- from spb_onprem.contents.service import (
15
- ContentService
16
- )
17
11
  from spb_onprem.base_service import BaseService
18
12
  from spb_onprem.base_types import (
19
13
  Undefined,
@@ -22,17 +16,11 @@ from spb_onprem.base_types import (
22
16
  from .queries import Queries
23
17
  from .entities import (
24
18
  Data,
25
- Scene,
26
19
  AnnotationVersion,
27
- Annotation,
28
- DataMeta,
29
20
  Prediction,
30
21
  Frame,
31
22
  )
32
23
  from .enums import (
33
- DataType,
34
- SceneType,
35
- DataMetaValue,
36
24
  DataStatus,
37
25
  )
38
26
  from .params import (
@@ -177,105 +165,23 @@ class DataService(BaseService):
177
165
  response.get("totalCount", 0)
178
166
  )
179
167
 
180
- def _detect_image_type(self, file_data: BytesIO) -> str:
181
- """Detect image type from BytesIO data using magic numbers.
182
-
183
- Args:
184
- file_data (BytesIO): The image data.
185
-
186
- Returns:
187
- str: The MIME type of the image.
188
-
189
- Raises:
190
- BadParameterError: If the file format is not a supported image type.
191
- """
192
- current_pos = file_data.tell()
193
- file_data.seek(0)
194
- header = file_data.read(12)
195
- file_data.seek(current_pos)
196
-
197
- if header.startswith(b'\xff\xd8\xff'):
198
- return 'image/jpeg'
199
- elif header.startswith(b'\x89PNG\r\n\x1a\n'):
200
- return 'image/png'
201
- elif header.startswith(b'GIF8'):
202
- return 'image/gif'
203
- elif header.startswith(b'RIFF') and header[8:12] == b'WEBP':
204
- return 'image/webp'
205
- else:
206
- raise BadParameterError(
207
- "Unsupported image format. Only JPEG, PNG, GIF, and WebP formats are supported."
208
- )
209
-
210
- def create_image_data(
168
+ def create_data(
211
169
  self,
212
- dataset_id: str,
213
- key: str,
214
- image_content: Union[
215
- BytesIO,
216
- str,
217
- ],
218
- slices: Optional[List[str]] = None,
219
- annotation: Optional[dict] = None,
220
- predictions: Optional[List[dict]] = None,
221
- meta: Optional[List[dict]] = None,
222
- system_meta: Optional[List[dict]] = None,
170
+ data: Data,
223
171
  ):
224
- """Create an image data.
172
+ """Create data in the dataset.
225
173
 
226
174
  Args:
227
- dataset_id (str): The dataset id.
228
- key (str): The key of the data.
229
- image_content (Union[BytesIO, str]): The image content. If str, it is considered as a file path.
230
- slices (Optional[List[str]]): The slices to add the data to.
231
- annotation (Optional[dict]): The annotation data.
232
- predictions (Optional[List[dict]]): The predictions data.
233
- meta (Optional[List[dict]]): The meta data.
234
- system_meta (Optional[List[dict]]): The system meta data.
175
+ data (Data): The data object to create.
235
176
 
236
177
  Returns:
237
178
  Data: The created data.
238
179
  """
239
- content_service = ContentService()
240
- if isinstance(image_content, str):
241
- content = content_service.upload_content(
242
- image_content,
243
- key,
244
- )
245
- else:
246
- # Detect the image type from BytesIO data
247
- content_type = self._detect_image_type(image_content)
248
- content = content_service.upload_content_with_data(
249
- image_content,
250
- content_type,
251
- key,
252
- )
253
-
254
- # Create Data object for the create_params function
255
- data_obj = Data(
256
- dataset_id=dataset_id,
257
- key=key,
258
- type=DataType.SUPERB_IMAGE,
259
- slice_ids=slices,
260
- scene=[Scene(
261
- id=f"{key}_scene_0",
262
- type=SceneType.IMAGE,
263
- content=content,
264
- meta={}
265
- )],
266
- thumbnail=content,
267
- annotation=annotation,
268
- predictions=predictions,
269
- meta=meta,
270
- system_meta=system_meta,
271
- )
272
-
273
180
  response = self.request_gql(
274
181
  Queries.CREATE,
275
- Queries.CREATE["variables"](data_obj)
182
+ Queries.CREATE["variables"](data)
276
183
  )
277
- data = Data.model_validate(response)
278
- return data
184
+ return Data.model_validate(response)
279
185
 
280
186
  def update_data(
281
187
  self,
@@ -289,10 +195,6 @@ class DataService(BaseService):
289
195
  List[dict],
290
196
  UndefinedType,
291
197
  ] = Undefined,
292
- system_meta: Union[
293
- List[dict],
294
- UndefinedType,
295
- ] = Undefined,
296
198
  ):
297
199
  """Update a data.
298
200
 
@@ -301,7 +203,6 @@ class DataService(BaseService):
301
203
  data_id (str): The data id.
302
204
  key (Union[str, UndefinedType], optional): The key of the data. Defaults to Undefined.
303
205
  meta (Union[List[dict], UndefinedType], optional): The meta data. Defaults to Undefined.
304
- system_meta (Union[List[dict], UndefinedType], optional): The system meta data. Defaults to Undefined.
305
206
 
306
207
  Returns:
307
208
  Data: The updated data.
@@ -313,7 +214,6 @@ class DataService(BaseService):
313
214
  data_id=data_id,
314
215
  key=key,
315
216
  meta=meta,
316
- system_meta=system_meta,
317
217
  )
318
218
  )
319
219
  data = Data.model_validate(response)
@@ -869,3 +769,98 @@ class DataService(BaseService):
869
769
  )
870
770
  data = Data.model_validate(response)
871
771
  return data
772
+
773
+ def get_data_detail(
774
+ self,
775
+ dataset_id: str,
776
+ data_id: str,
777
+ ) -> Data:
778
+ """Get detailed data information including all nested relationships.
779
+
780
+ This method retrieves comprehensive data information including:
781
+ - Scene content references
782
+ - Annotation versions with content references
783
+ - Predictions with content references
784
+ - Thumbnail references
785
+
786
+ Args:
787
+ dataset_id (str): The dataset ID.
788
+ data_id (str): The data ID.
789
+
790
+ Returns:
791
+ Data: The data object with all nested relationships.
792
+ """
793
+ if dataset_id is None:
794
+ raise BadParameterError("dataset_id is required.")
795
+ if data_id is None:
796
+ raise BadParameterError("data_id is required.")
797
+
798
+ response = self.request_gql(
799
+ Queries.GET_DETAIL,
800
+ Queries.GET_DETAIL["variables"](dataset_id, data_id)
801
+ )
802
+ data_dict = response.get("data", {})
803
+ return Data.model_validate(data_dict)
804
+
805
+ def get_evaluation_value_list(
806
+ self,
807
+ dataset_id: str,
808
+ prediction_set_id: str,
809
+ filter: Union[UndefinedType, dict] = Undefined,
810
+ length: int = 50,
811
+ cursor: Union[UndefinedType, str] = Undefined
812
+ ) -> dict:
813
+ """Get evaluation values list for diagnosis filtering.
814
+
815
+ Retrieves evaluation values for diagnosis filtering with pagination support.
816
+
817
+ Args:
818
+ dataset_id (str): The dataset ID.
819
+ prediction_set_id (str): The prediction set ID.
820
+ filter (Union[UndefinedType, dict]): Diagnosis filter for evaluation values.
821
+ length (int): Number of items to retrieve per page.
822
+ cursor (Union[UndefinedType, str]): Cursor for pagination.
823
+
824
+ Returns:
825
+ dict: Response containing totalCount, next cursor, and data list with dataId fields.
826
+ """
827
+ if dataset_id is None:
828
+ raise BadParameterError("dataset_id is required.")
829
+ if prediction_set_id is None:
830
+ raise BadParameterError("prediction_set_id is required.")
831
+
832
+ response = self.request_gql(
833
+ Queries.GET_EVALUATION_VALUE_LIST,
834
+ Queries.GET_EVALUATION_VALUE_LIST["variables"](
835
+ dataset_id=dataset_id,
836
+ prediction_set_id=prediction_set_id,
837
+ filter=filter,
838
+ length=length,
839
+ cursor=cursor
840
+ )
841
+ )
842
+ return response.get("evaluationValueList", {})
843
+
844
+ def get_total_data_id_count_in_evaluation_value(
845
+ self,
846
+ dataset_id: str,
847
+ prediction_set_id: str,
848
+ filter: Union[UndefinedType, dict] = Undefined
849
+ ) -> int:
850
+ """Get total count of data IDs in evaluation values for diagnosis filtering.
851
+
852
+ Args:
853
+ dataset_id (str): The dataset ID.
854
+ prediction_set_id (str): The prediction set ID.
855
+ filter (Union[UndefinedType, dict]): Diagnosis filter for evaluation values.
856
+
857
+ Returns:
858
+ int: Total count of evaluation values.
859
+ """
860
+ result = self.get_evaluation_value_list(
861
+ dataset_id=dataset_id,
862
+ prediction_set_id=prediction_set_id,
863
+ filter=filter,
864
+ length=1
865
+ )
866
+ return result.get("totalCount", 0)
@@ -2,10 +2,12 @@ from .dataset import dataset_params
2
2
  from .datasets import datasets_params
3
3
  from .create_dataset import create_dataset_params
4
4
  from .update_dataset import update_dataset_params
5
+ from .delete_dataset import delete_dataset_params
5
6
 
6
7
  __all__ = (
7
8
  "dataset_params",
8
9
  "datasets_params",
9
10
  "create_dataset_params",
10
11
  "update_dataset_params",
12
+ "delete_dataset_params",
11
13
  )