superb-ai-onprem 0.3.8__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of superb-ai-onprem might be problematic.

Files changed (27)
  1. spb_onprem/__init__.py +4 -0
  2. spb_onprem/_version.py +2 -2
  3. spb_onprem/data/entities/__init__.py +2 -0
  4. spb_onprem/data/entities/annotation.py +2 -0
  5. spb_onprem/data/entities/data.py +2 -0
  6. spb_onprem/data/entities/data_slice.py +17 -0
  7. spb_onprem/data/enums/__init__.py +2 -0
  8. spb_onprem/data/enums/data_slice_status.py +12 -0
  9. spb_onprem/data/params/__init__.py +50 -22
  10. spb_onprem/data/params/change_data_labeler.py +23 -0
  11. spb_onprem/data/params/change_data_reviewer.py +23 -0
  12. spb_onprem/data/params/change_data_status.py +23 -0
  13. spb_onprem/data/params/data_list.py +12 -1
  14. spb_onprem/data/params/delete_slice_annotation_version.py +24 -0
  15. spb_onprem/data/params/insert_annotation_version.py +13 -6
  16. spb_onprem/data/params/insert_slice_annotation_version.py +39 -0
  17. spb_onprem/data/params/update_data_slice.py +23 -0
  18. spb_onprem/data/params/update_slice_annotation.py +24 -0
  19. spb_onprem/data/params/update_slice_annotation_version.py +40 -0
  20. spb_onprem/data/queries.py +234 -18
  21. spb_onprem/data/service.py +394 -129
  22. spb_onprem/entities.py +4 -0
  23. {superb_ai_onprem-0.3.8.dist-info → superb_ai_onprem-0.4.0.dist-info}/METADATA +1 -1
  24. {superb_ai_onprem-0.3.8.dist-info → superb_ai_onprem-0.4.0.dist-info}/RECORD +27 -17
  25. {superb_ai_onprem-0.3.8.dist-info → superb_ai_onprem-0.4.0.dist-info}/WHEEL +0 -0
  26. {superb_ai_onprem-0.3.8.dist-info → superb_ai_onprem-0.4.0.dist-info}/licenses/LICENSE +0 -0
  27. {superb_ai_onprem-0.3.8.dist-info → superb_ai_onprem-0.4.0.dist-info}/top_level.txt +0 -0
@@ -32,6 +32,7 @@ from .enums import (
     DataType,
     SceneType,
     DataMetaValue,
+    DataSliceStatus,
 )
 from .params import (
     DataListFilter,
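
The DataSliceStatus export added above is the enum that the slice-level workflow methods later in this diff accept. A minimal sketch of picking it up after upgrading, assuming it is re-exported from spb_onprem.data.enums (the +2 change to data/enums/__init__.py suggests this, but the diff does not show it) and that it is a standard Python Enum; its member names are not visible in this diff:

    # Hedged sketch: import path and Enum behaviour are assumptions, see note above.
    from spb_onprem.data.enums import DataSliceStatus

    # Member names are not shown in this diff, so just enumerate whatever is defined.
    for status in DataSliceStatus:
        print(status.name, status.value)
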
@@ -98,103 +99,164 @@ class DataService(BaseService):
             Queries.GET["variables"](dataset_id=dataset_id, data_key=data_key)
         )
         return Data.model_validate(response)
-
+
     def get_data_list(
         self,
         dataset_id: str,
         data_filter: Optional[DataListFilter] = None,
         cursor: Optional[str] = None,
-        length: Optional[int] = 10,
+        length: int = 10
     ):
-        """Get a list of data.
+        """Get data list of a dataset.

         Args:
             dataset_id (str): The dataset id.
-            filter (Optional[DataListFilter], optional): The filter for the data list. Defaults to None.
-            cursor (Optional[str], optional): The cursor for the data list. Defaults to None.
-            length (Optional[int], optional): The length of the data list. Defaults to 10.
-
-        Raises:
-            BadParameterError: The maximum length is 50.
+            data_filter (Optional[DataListFilter]): The filter to apply to the data.
+            cursor (Optional[str]): The cursor to use for pagination.
+            length (int): The length of the data to retrieve.

         Returns:
-            Tuple[List[Data], Optional[str], int]: The data list, the next cursor, and the total count.
+            tuple: A tuple containing the data, the next cursor, and the total count of data.
         """
         if length > 50:
-            raise BadParameterError("The maximum length is 50.")
-
+            raise ValueError("Length must be less than or equal to 50.")
+
         response = self.request_gql(
             Queries.GET_LIST,
             Queries.GET_LIST["variables"](
                 dataset_id=dataset_id,
-                data_list_filter=data_filter,
+                data_filter=data_filter,
                 cursor=cursor,
                 length=length
             )
         )
-        data_list = [Data.model_validate(data) for data in response.get("data", [])]
+        data_list = response.get("data", [])
+        data = [Data.model_validate(data_dict) for data_dict in data_list]
         return (
-            data_list,
+            data,
             response.get("next", None),
-            response.get("totalCount", False)
+            response.get("totalCount", 0)
         )

     def get_data_id_list(
         self,
         dataset_id: str,
-        filter: Optional[DataListFilter] = None,
+        data_filter: Optional[DataListFilter] = None,
         cursor: Optional[str] = None,
-        length: Optional[int] = 50,
+        length: int = 10
     ):
-        """Get a list of data.
+        """Get data id list of a dataset.

         Args:
             dataset_id (str): The dataset id.
-            filter (Optional[DataListFilter], optional): The filter for the data list. Defaults to None.
-            cursor (Optional[str], optional): The cursor for the data list. Defaults to None.
-            length (Optional[int], optional): The length of the data list. Defaults to 50.
-
-        Raises:
-            BadParameterError: The maximum length is 500.
+            data_filter (Optional[DataListFilter]): The filter to apply to the data.
+            cursor (Optional[str]): The cursor to use for pagination.
+            length (int): The length of the data to retrieve.

         Returns:
-            Tuple[List[Data], Optional[str], int]: The data list, the next cursor, and the total count.
+            tuple: A tuple containing the data, the next cursor, and the total count of data.
         """
-
-        if length > 500:
-            raise ValueError("The maximum length is 500.")
-
+        if length > 50:
+            raise ValueError("Length must be less than or equal to 50.")
+
         response = self.request_gql(
             Queries.GET_ID_LIST,
             Queries.GET_ID_LIST["variables"](
                 dataset_id=dataset_id,
-                data_list_filter=filter,
+                data_filter=data_filter,
                 cursor=cursor,
-                length=length,
+                length=length
             )
         )
-        data_ids = response.get("data", [])
-        data_list = [Data.model_validate(data_id) for data_id in data_ids]
+        data_list = response.get("data", [])
+        data = [Data.model_validate(data_dict) for data_dict in data_list]
         return (
-            data_list,
+            data,
             response.get("next", None),
-            response.get("totalCount", False)
+            response.get("totalCount", 0)
         )

+    def create_image_data(
+        self,
+        dataset_id: str,
+        key: str,
+        image_content: Union[
+            BytesIO,
+            str,
+        ],
+        slices: Optional[List[str]] = None,
+        annotation: Optional[dict] = None,
+        predictions: Optional[List[dict]] = None,
+        meta: Optional[List[dict]] = None,
+        system_meta: Optional[List[dict]] = None,
+    ):
+        """Create an image data.
+
+        Args:
+            dataset_id (str): The dataset id.
+            key (str): The key of the data.
+            image_content (Union[BytesIO, str]): The image content. If str, it is considered as a file path.
+            slices (Optional[List[str]]): The slices to add the data to.
+            annotation (Optional[dict]): The annotation data.
+            predictions (Optional[List[dict]]): The predictions data.
+            meta (Optional[List[dict]]): The meta data.
+            system_meta (Optional[List[dict]]): The system meta data.
+
+        Returns:
+            Data: The created data.
+        """
+        content_service = ContentService()
+        if isinstance(image_content, str):
+            with open(image_content, "rb") as f:
+                content = content_service.upload_content(
+                    f.read(),
+                    key,
+                    f.name.split(".")[-1]
+                )
+        else:
+            content = content_service.upload_content(
+                image_content.read(),
+                key,
+                "jpg"
+            )
+
+        response = self.request_gql(
+            Queries.CREATE,
+            Queries.CREATE["variables"](
+                dataset_id=dataset_id,
+                key=key,
+                type=DataType.SUPERB_IMAGE,
+                slices=slices,
+                scene=[{
+                    "id": f"{key}_scene_0",
+                    "type": SceneType.IMAGE,
+                    "content": content.model_dump(by_alias=True),
+                    "meta": {}
+                }],
+                thumbnail=content.model_dump(by_alias=True),
+                annotation=annotation,
+                predictions=predictions,
+                meta=meta,
+                system_meta=system_meta,
+            )
+        )
+        data = Data.model_validate(response)
+        return data
+
     def update_data(
         self,
         dataset_id: str,
         data_id: str,
         key: Union[
-            Optional[str],
-            UndefinedType
+            str,
+            UndefinedType,
         ] = Undefined,
         meta: Union[
-            Optional[List[DataMeta]],
+            List[dict],
             UndefinedType,
         ] = Undefined,
         system_meta: Union[
-            Optional[List[DataMeta]],
+            List[dict],
             UndefinedType,
         ] = Undefined,
     ):
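
In the hunk above, get_data_list and get_data_id_list rename their filter keyword to data_filter, make length a plain int capped at 50 (raising ValueError rather than BadParameterError), and a new create_image_data method accepts either a BytesIO or a file path. A minimal usage sketch under assumptions the diff does not show (how DataService is constructed and authenticated, and real dataset ids); the import path is inferred from the file list:

    from spb_onprem.data.service import DataService  # path assumed from spb_onprem/data/service.py

    service = DataService()  # construction/credentials are outside this diff

    # Paginate with the renamed keyword; length above 50 now raises ValueError.
    data, next_cursor, total_count = service.get_data_list(
        dataset_id="DATASET_ID",      # placeholder
        data_filter=None,
        cursor=None,
        length=50,
    )

    # New in 0.4.0: create an image datum directly from a file path (or a BytesIO).
    created = service.create_image_data(
        dataset_id="DATASET_ID",      # placeholder
        key="example.jpg",            # placeholder key
        image_content="/path/to/example.jpg",
    )
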
@@ -203,20 +265,15 @@ class DataService(BaseService):
         Args:
             dataset_id (str): The dataset id.
             data_id (str): The data id.
-            key (Union[Optional[str], UndefinedType], optional): The key of the data. Defaults to Undefined.
-            meta (Union[Optional[List[DataMeta]], UndefinedType], optional): The meta of the data. Defaults to Undefined.
-            system_meta (Union[Optional[List[DataMeta]], UndefinedType], optional): The system meta of the data. Defaults to Undefined.
+            key (Union[str, UndefinedType], optional): The key of the data. Defaults to Undefined.
+            meta (Union[List[dict], UndefinedType], optional): The meta data. Defaults to Undefined.
+            system_meta (Union[List[dict], UndefinedType], optional): The system meta data. Defaults to Undefined.

         Returns:
             Data: The updated data.
         """
-        if dataset_id is None:
-            raise BadParameterError("dataset_id is required.")
-        if data_id is None:
-            raise BadParameterError("data_id is required.")
-
         response = self.request_gql(
-            query=Queries.UPDATE,
+            Queries.UPDATE,
             variables=Queries.UPDATE["variables"](
                 dataset_id=dataset_id,
                 data_id=data_id,
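
The signature change above means update_data now takes plain dicts for meta and system_meta instead of DataMeta entities, and no longer pre-validates dataset_id/data_id. A sketch of a call under that reading; the dict schema is not visible in this diff, so the key/value shape below is only a guess:

    updated = service.update_data(        # `service` as constructed in the earlier sketch
        dataset_id="DATASET_ID",           # placeholder
        data_id="DATA_ID",                 # placeholder
        key="renamed-key",
        meta=[{"key": "camera", "value": "front"}],  # assumed dict shape, not confirmed by the diff
    )
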
@@ -286,17 +343,15 @@ class DataService(BaseService):
             data_id (str): The data id.

         Returns:
-            Boolean: True if the data is deleted, False otherwise.
+            Data: The deleted data.
         """
         response = self.request_gql(
             Queries.DELETE,
-            Queries.DELETE["variables"](
-                dataset_id=dataset_id,
-                data_id=data_id
-            )
+            Queries.DELETE["variables"](dataset_id=dataset_id, data_id=data_id)
         )
-        return response
-
+        data = Data.model_validate(response)
+        return data
+
     def insert_prediction(
         self,
         dataset_id: str,
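
The delete hunk above changes the return value from the raw GraphQL response (documented as a boolean) to a validated Data entity. The surrounding method name is cut off in this hunk, so delete_data below is only a guess based on Queries.DELETE:

    deleted = service.delete_data(         # method name assumed; only the body appears in the hunk
        dataset_id="DATASET_ID",            # placeholder
        data_id="DATA_ID",                  # placeholder
    )
    print(type(deleted))                    # a Data model in 0.4.0, not a boolean
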
@@ -318,12 +373,12 @@ class DataService(BaseService):
             Queries.INSERT_PREDICTION["variables"](
                 dataset_id=dataset_id,
                 data_id=data_id,
-                prediction=prediction
+                prediction=prediction,
             )
         )
         data = Data.model_validate(response)
         return data
-
+
     def delete_prediction(
         self,
         dataset_id: str,
@@ -335,8 +390,8 @@ class DataService(BaseService):
         Args:
             dataset_id (str): The dataset id.
             data_id (str): The data id.
-            set_id (str): The prediction Set id to delete.
-
+            set_id (str): The set id.
+
         Returns:
             Data: The updated data.
         """
@@ -442,100 +497,310 @@ class DataService(BaseService):
         data = Data.model_validate(response)
         return data

-    def create_data(
+    def update_slice_annotation(
         self,
-        data: Data,
-    )-> Data:
-        """Create a data.
+        dataset_id: str,
+        data_id: str,
+        slice_id: str,
+        meta: dict,
+    ):
+        """Update a slice annotation.
+
         Args:
-            data (Data): The data to create.
+            dataset_id (str): The dataset id.
+            data_id (str): The data id.
+            slice_id (str): The slice id.
+            meta (dict): The meta of the slice annotation.

         Returns:
-            Data: The created data object.
+            Data: The updated data.
         """
         response = self.request_gql(
-            Queries.CREATE,
-            Queries.CREATE["variables"](data)
+            Queries.UPDATE_SLICE_ANNOTATION,
+            Queries.UPDATE_SLICE_ANNOTATION["variables"](
+                dataset_id=dataset_id,
+                data_id=data_id,
+                slice_id=slice_id,
+                meta=meta,
+            )
         )
-        return Data.model_validate(response)
+        data = Data.model_validate(response)
+        return data

-    def create_image_data(
+    def insert_slice_annotation_version(
         self,
         dataset_id: str,
-        key: str,
-        image_content: BytesIO,
-        thumbnail_content: Optional[BytesIO] = None,
-        slice_ids: Optional[List[str]] = None,
-        annotation: Optional[dict] = None,
-        meta: Optional[dict[str, DataMetaValue]] = None,
+        data_id: str,
+        slice_id: str,
+        version: AnnotationVersion,
     ):
-        """Create image data in the dataset.
-        Image processing should be done by the client before calling this method.
+        """Insert a slice annotation version.

         Args:
-            dataset_id (str): The ID of the dataset to upload the image data to.
-            key (str): The key for the image data.
-            image_content (BytesIO): The pre-processed image data to upload.
-            thumbnail_content (Optional[BytesIO]): Pre-processed thumbnail data. If not provided, no thumbnail will be created.
-            slice_ids (Optional[List[str]]): List of slice IDs associated with the image data.
-            annotation (Optional[dict]): Annotations associated with the image data.
-            meta (Optional[dict[str, DataMetaValue]]): Metadata of the data. { "key1": val1, "key2": val2, ... }
+            dataset_id (str): The dataset id.
+            data_id (str): The data id.
+            slice_id (str): The slice id.
+            version (AnnotationVersion): The annotation version.

         Returns:
-            Data: The created data object.
+            Data: The updated data.
         """
-        content_service = ContentService()
-        data = Data(
-            dataset_id=dataset_id,
-            key=key,
-            type=DataType.SUPERB_IMAGE,
+        if dataset_id is None:
+            raise BadParameterError("dataset_id is required.")
+        if data_id is None:
+            raise BadParameterError("data_id is required.")
+        if slice_id is None:
+            raise BadParameterError("slice_id is required.")
+        if version is None:
+            raise BadParameterError("version is required.")
+
+        response = self.request_gql(
+            Queries.INSERT_SLICE_ANNOTATION_VERSION,
+            Queries.INSERT_SLICE_ANNOTATION_VERSION["variables"](
+                dataset_id=dataset_id,
+                data_id=data_id,
+                slice_id=slice_id,
+                version=version,
+            )
         )
-        data.slice_ids = slice_ids
+        data = Data.model_validate(response)
+        return data

-        # Upload main image content
-        image_content_obj = content_service.upload_content_with_data(
-            image_content,
-            content_type="image/jpeg",
-            key=f"{key}_image",
-        )
-        scene = Scene(
-            type=SceneType.IMAGE,
-            content=image_content_obj,
-            meta=None,
+    def update_slice_annotation_version(
+        self,
+        dataset_id: str,
+        data_id: str,
+        slice_id: str,
+        id: str,
+        channel: Union[str, UndefinedType, None] = Undefined,
+        version: Union[str, UndefinedType, None] = Undefined,
+        meta: Union[dict, UndefinedType, None] = Undefined,
+    ):
+        """Update a slice annotation version.
+
+        Args:
+            dataset_id (str): The dataset id.
+            data_id (str): The data id.
+            slice_id (str): The slice id.
+            id (str): The annotation version id.
+            channel (Union[str, UndefinedType, None], optional): The channel. Defaults to Undefined.
+            version (Union[str, UndefinedType, None], optional): The version. Defaults to Undefined.
+            meta (Union[dict, UndefinedType, None], optional): The meta. Defaults to Undefined.
+
+        Returns:
+            Data: The updated data.
+        """
+        if dataset_id is None:
+            raise BadParameterError("dataset_id is required.")
+        if data_id is None:
+            raise BadParameterError("data_id is required.")
+        if slice_id is None:
+            raise BadParameterError("slice_id is required.")
+        if id is None:
+            raise BadParameterError("id is required.")
+
+        response = self.request_gql(
+            Queries.UPDATE_SLICE_ANNOTATION_VERSION,
+            Queries.UPDATE_SLICE_ANNOTATION_VERSION["variables"](
+                dataset_id=dataset_id,
+                data_id=data_id,
+                slice_id=slice_id,
+                id=id,
+                channel=channel,
+                version=version,
+                meta=meta,
+            )
         )
-        data.scene = [scene]
-
-        # Upload thumbnail if provided
-        if thumbnail_content is not None:
-            thumbnail_content_obj = content_service.upload_content_with_data(
-                thumbnail_content,
-                content_type="image/jpeg",
-                key=f"{key}_thumbnail",
+        data = Data.model_validate(response)
+        return data
+
+    def delete_slice_annotation_version(
+        self,
+        dataset_id: str,
+        data_id: str,
+        slice_id: str,
+        id: str,
+    ):
+        """Delete a slice annotation version.
+
+        Args:
+            dataset_id (str): The dataset id.
+            data_id (str): The data id.
+            slice_id (str): The slice id.
+            id (str): The annotation version id.
+
+        Returns:
+            Data: The updated data.
+        """
+        if dataset_id is None:
+            raise BadParameterError("dataset_id is required.")
+        if data_id is None:
+            raise BadParameterError("data_id is required.")
+        if slice_id is None:
+            raise BadParameterError("slice_id is required.")
+        if id is None:
+            raise BadParameterError("id is required.")
+
+        response = self.request_gql(
+            Queries.DELETE_SLICE_ANNOTATION_VERSION,
+            Queries.DELETE_SLICE_ANNOTATION_VERSION["variables"](
+                dataset_id=dataset_id,
+                data_id=data_id,
+                slice_id=slice_id,
+                id=id,
             )
-        data.thumbnail = thumbnail_content_obj
+        )
+        data = Data.model_validate(response)
+        return data
+
+    def change_data_status(
+        self,
+        dataset_id: str,
+        data_id: str,
+        slice_id: str,
+        status: DataSliceStatus,
+    ):
+        """Change the status of a data slice.
+
+        Args:
+            dataset_id (str): The dataset id.
+            data_id (str): The data id.
+            slice_id (str): The slice id.
+            status (DataSliceStatus): The new status.

-        # Handle annotation if provided
-        if annotation is not None:
-            annotation_content = content_service.upload_json_content(
-                annotation,
-                key=f"{key}_annotation",
+        Returns:
+            Data: The updated data.
+        """
+        if dataset_id is None:
+            raise BadParameterError("dataset_id is required.")
+        if data_id is None:
+            raise BadParameterError("data_id is required.")
+        if slice_id is None:
+            raise BadParameterError("slice_id is required.")
+        if status is None:
+            raise BadParameterError("status is required.")
+
+        response = self.request_gql(
+            Queries.CHANGE_DATA_STATUS,
+            Queries.CHANGE_DATA_STATUS["variables"](
+                dataset_id=dataset_id,
+                data_id=data_id,
+                slice_id=slice_id,
+                status=status,
             )
-            annotation_version = AnnotationVersion(
-                content=annotation_content,
-                meta=None
+        )
+        data = Data.model_validate(response)
+        return data
+
+    def change_data_labeler(
+        self,
+        dataset_id: str,
+        data_id: str,
+        slice_id: str,
+        labeler: Optional[str],
+    ):
+        """Change the labeler of a data slice.
+
+        Args:
+            dataset_id (str): The dataset id.
+            data_id (str): The data id.
+            slice_id (str): The slice id.
+            labeler (Optional[str]): The labeler id. None to unassign.
+
+        Returns:
+            Data: The updated data.
+        """
+        if dataset_id is None:
+            raise BadParameterError("dataset_id is required.")
+        if data_id is None:
+            raise BadParameterError("data_id is required.")
+        if slice_id is None:
+            raise BadParameterError("slice_id is required.")
+
+        response = self.request_gql(
+            Queries.CHANGE_DATA_LABELER,
+            Queries.CHANGE_DATA_LABELER["variables"](
+                dataset_id=dataset_id,
+                data_id=data_id,
+                slice_id=slice_id,
+                labeler=labeler,
             )
-            annotation_obj = Annotation(
-                versions=[annotation_version],
-                meta=None
+        )
+        data = Data.model_validate(response)
+        return data
+
+    def change_data_reviewer(
+        self,
+        dataset_id: str,
+        data_id: str,
+        slice_id: str,
+        reviewer: Optional[str],
+    ):
+        """Change the reviewer of a data slice.
+
+        Args:
+            dataset_id (str): The dataset id.
+            data_id (str): The data id.
+            slice_id (str): The slice id.
+            reviewer (Optional[str]): The reviewer id. None to unassign.
+
+        Returns:
+            Data: The updated data.
+        """
+        if dataset_id is None:
+            raise BadParameterError("dataset_id is required.")
+        if data_id is None:
+            raise BadParameterError("data_id is required.")
+        if slice_id is None:
+            raise BadParameterError("slice_id is required.")
+
+        response = self.request_gql(
+            Queries.CHANGE_DATA_REVIEWER,
+            Queries.CHANGE_DATA_REVIEWER["variables"](
+                dataset_id=dataset_id,
+                data_id=data_id,
+                slice_id=slice_id,
+                reviewer=reviewer,
             )
-        data.annotation = annotation_obj
+        )
+        data = Data.model_validate(response)
+        return data
+
+    def update_data_slice(
+        self,
+        dataset_id: str,
+        data_id: str,
+        slice_id: str,
+        meta: dict,
+    ):
+        """Update the metadata of a data slice.

-        # Handle metadata if provided
-        if meta is not None:
-            data.meta = DataMeta.from_dict(meta)
+        Args:
+            dataset_id (str): The dataset id.
+            data_id (str): The data id.
+            slice_id (str): The slice id.
+            meta (dict): The meta of the data slice.

+        Returns:
+            Data: The updated data.
+        """
+        if dataset_id is None:
+            raise BadParameterError("dataset_id is required.")
+        if data_id is None:
+            raise BadParameterError("data_id is required.")
+        if slice_id is None:
+            raise BadParameterError("slice_id is required.")
+        if meta is None:
+            raise BadParameterError("meta is required.")
+
         response = self.request_gql(
-            Queries.CREATE,
-            Queries.CREATE["variables"](data)
+            Queries.UPDATE_DATA_SLICE,
+            Queries.UPDATE_DATA_SLICE["variables"](
+                dataset_id=dataset_id,
+                data_id=data_id,
+                slice_id=slice_id,
+                meta=meta,
+            )
         )
-        return Data.model_validate(response)
+        data = Data.model_validate(response)
+        return data
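
The slice-level methods added above (update_slice_annotation, insert/update/delete_slice_annotation_version, change_data_status, change_data_labeler, change_data_reviewer, update_data_slice) all take the same dataset_id/data_id/slice_id triple, validate it with BadParameterError, and return the updated Data. A minimal workflow sketch, again assuming the configured `service` from the earlier sketch, placeholder ids, and that DataSliceStatus is a standard Enum whose members are not shown in this diff:

    from spb_onprem.data.enums import DataSliceStatus  # import path assumed from the file list

    dataset_id, data_id, slice_id = "DATASET_ID", "DATA_ID", "SLICE_ID"  # placeholders

    # Assign a labeler and a reviewer within the slice (passing None would unassign).
    service.change_data_labeler(dataset_id=dataset_id, data_id=data_id,
                                slice_id=slice_id, labeler="LABELER_ID")
    service.change_data_reviewer(dataset_id=dataset_id, data_id=data_id,
                                 slice_id=slice_id, reviewer="REVIEWER_ID")

    # Move the data slice to some status; member names come from DataSliceStatus at runtime.
    some_status = list(DataSliceStatus)[0]  # illustrative pick, not a documented value
    service.change_data_status(dataset_id=dataset_id, data_id=data_id,
                               slice_id=slice_id, status=some_status)

    # Attach slice-level metadata; the keys here are arbitrary examples.
    service.update_data_slice(dataset_id=dataset_id, data_id=data_id,
                              slice_id=slice_id, meta={"qa": "pending"})
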