superb-ai-onprem 0.1.0 (superb_ai_onprem-0.1.0-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of superb-ai-onprem has been flagged as potentially problematic.

Files changed (72)
  1. spb_onprem/__init__.py +74 -0
  2. spb_onprem/_version.py +21 -0
  3. spb_onprem/base_model.py +6 -0
  4. spb_onprem/base_service.py +164 -0
  5. spb_onprem/base_types.py +11 -0
  6. spb_onprem/contents/__init__.py +6 -0
  7. spb_onprem/contents/entities/__init__.py +8 -0
  8. spb_onprem/contents/entities/base_content.py +13 -0
  9. spb_onprem/contents/entities/content.py +17 -0
  10. spb_onprem/contents/queries.py +39 -0
  11. spb_onprem/contents/service.py +132 -0
  12. spb_onprem/data/__init__.py +6 -0
  13. spb_onprem/data/entities/__init__.py +15 -0
  14. spb_onprem/data/entities/annotation.py +25 -0
  15. spb_onprem/data/entities/data.py +28 -0
  16. spb_onprem/data/entities/data_meta.py +31 -0
  17. spb_onprem/data/entities/prediction.py +13 -0
  18. spb_onprem/data/entities/scene.py +14 -0
  19. spb_onprem/data/enums/__init__.py +10 -0
  20. spb_onprem/data/enums/data_meta_type.py +15 -0
  21. spb_onprem/data/enums/data_type.py +9 -0
  22. spb_onprem/data/enums/scene_type.py +10 -0
  23. spb_onprem/data/params/__init__.py +59 -0
  24. spb_onprem/data/params/create_data.py +68 -0
  25. spb_onprem/data/params/data.py +24 -0
  26. spb_onprem/data/params/data_list.py +96 -0
  27. spb_onprem/data/params/delete_annotation_version.py +20 -0
  28. spb_onprem/data/params/delete_data.py +17 -0
  29. spb_onprem/data/params/delete_prediction.py +22 -0
  30. spb_onprem/data/params/delete_scene.py +22 -0
  31. spb_onprem/data/params/insert_annotation_version.py +29 -0
  32. spb_onprem/data/params/insert_data_to_slice.py +22 -0
  33. spb_onprem/data/params/insert_prediction.py +25 -0
  34. spb_onprem/data/params/insert_scene.py +32 -0
  35. spb_onprem/data/params/remove_data_from_slice.py +22 -0
  36. spb_onprem/data/params/remove_data_meta.py +64 -0
  37. spb_onprem/data/params/update_annotation.py +30 -0
  38. spb_onprem/data/params/update_data.py +72 -0
  39. spb_onprem/data/params/update_scene.py +37 -0
  40. spb_onprem/data/params/upsert_data_meta.py +48 -0
  41. spb_onprem/data/queries.py +360 -0
  42. spb_onprem/data/service.py +524 -0
  43. spb_onprem/datasets/__init__.py +6 -0
  44. spb_onprem/datasets/entities/__init__.py +6 -0
  45. spb_onprem/datasets/entities/dataset.py +14 -0
  46. spb_onprem/datasets/params/__init__.py +11 -0
  47. spb_onprem/datasets/params/create_dataset.py +32 -0
  48. spb_onprem/datasets/params/dataset.py +26 -0
  49. spb_onprem/datasets/params/datasets.py +53 -0
  50. spb_onprem/datasets/params/update_dataset.py +39 -0
  51. spb_onprem/datasets/queries.py +79 -0
  52. spb_onprem/datasets/service.py +132 -0
  53. spb_onprem/exceptions.py +40 -0
  54. spb_onprem/slices/__init__.py +6 -0
  55. spb_onprem/slices/entities/__init__.py +5 -0
  56. spb_onprem/slices/entities/slice.py +17 -0
  57. spb_onprem/slices/params/__init__.py +23 -0
  58. spb_onprem/slices/params/create_slice.py +36 -0
  59. spb_onprem/slices/params/delete_slice.py +0 -0
  60. spb_onprem/slices/params/slice.py +42 -0
  61. spb_onprem/slices/params/slices.py +62 -0
  62. spb_onprem/slices/params/update_slice.py +45 -0
  63. spb_onprem/slices/queries.py +121 -0
  64. spb_onprem/slices/service.py +173 -0
  65. spb_onprem/users/__init__.py +0 -0
  66. spb_onprem/users/entities/__init__.py +5 -0
  67. spb_onprem/users/entities/auth.py +86 -0
  68. superb_ai_onprem-0.1.0.dist-info/METADATA +246 -0
  69. superb_ai_onprem-0.1.0.dist-info/RECORD +72 -0
  70. superb_ai_onprem-0.1.0.dist-info/WHEEL +5 -0
  71. superb_ai_onprem-0.1.0.dist-info/licenses/LICENSE +21 -0
  72. superb_ai_onprem-0.1.0.dist-info/top_level.txt +1 -0
spb_onprem/data/service.py
@@ -0,0 +1,524 @@
+ """
+ service.py
+
+ This module defines the DataService class, which handles data-related
+ operations against the on-prem GraphQL API.
+
+ Classes:
+     DataService: A service class, built on BaseService, that implements
+         queries and mutations for Data entities.
+ """
+ from io import BytesIO
+ from typing import (
+     Optional, List, Union,
+ )
+
+ from spb_onprem.contents.service import (
+     ContentService
+ )
+ from spb_onprem.base_service import BaseService
+ from spb_onprem.base_types import (
+     Undefined,
+     UndefinedType,
+ )
+ from spb_onprem.exceptions import BadParameterError
+ from .queries import Queries
+ from .entities import (
+     Data,
+     Scene,
+     AnnotationVersion,
+     Annotation,
+     DataMeta,
+     Prediction,
+ )
+ from .enums import (
+     DataType,
+     SceneType,
+     DataMetaValue,
+ )
+ from .params import (
+     DataListFilter,
+ )
+
+
+ class DataService(BaseService):
+     """
+     Service class for handling data-related operations.
+     """
+
+     def get_data(
+         self,
+         dataset_id: str,
+         data_id: str,
+     ):
+         """Get data by its id.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The id of the data.
+
+         Raises:
+             BadParameterError: If dataset_id or data_id is not provided.
+
+         Returns:
+             Data: The data.
+         """
+         if dataset_id is None:
+             raise BadParameterError("dataset_id is required.")
+         if data_id is None:
+             raise BadParameterError("data_id is required.")
+
+         response = self.request_gql(
+             Queries.GET,
+             Queries.GET["variables"](
+                 dataset_id=dataset_id,
+                 data_id=data_id,
+             )
+         )
+
+         return Data.model_validate(response)
+
+     def get_data_by_key(
+         self,
+         dataset_id: str,
+         data_key: str,
+     ):
+         """Get data by its user-assigned key.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_key (str): The key of the data.
+
+         Raises:
+             BadParameterError: If dataset_id or data_key is not provided.
+
+         Returns:
+             Data: The data.
+         """
+         if dataset_id is None:
+             raise BadParameterError("dataset_id is required.")
+         if data_key is None:
+             raise BadParameterError("data_key is required.")
+         response = self.request_gql(
+             Queries.GET,
+             Queries.GET["variables"](dataset_id=dataset_id, data_key=data_key)
+         )
+         return Data.model_validate(response)
+
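The two lookups above differ only in the identifier they accept. A minimal usage sketch (assuming BaseService resolves host and credentials from its own configuration; the ids below are placeholders):

    from spb_onprem.data.service import DataService

    service = DataService()
    # Fetch by internal id, then re-fetch the same record by its key.
    data = service.get_data(dataset_id="my-dataset-id", data_id="my-data-id")
    same = service.get_data_by_key(dataset_id="my-dataset-id", data_key=data.key)
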
+     def get_data_list(
+         self,
+         dataset_id: str,
+         data_filter: Optional[DataListFilter] = None,
+         cursor: Optional[str] = None,
+         length: Optional[int] = 10,
+     ):
+         """Get a page of data.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_filter (Optional[DataListFilter], optional): The filter for the data list. Defaults to None.
+             cursor (Optional[str], optional): The pagination cursor. Defaults to None.
+             length (Optional[int], optional): The page size. Defaults to 10.
+
+         Raises:
+             BadParameterError: If length exceeds the maximum of 50.
+
+         Returns:
+             Tuple[List[Data], Optional[str], int]: The data list, the next cursor, and the total count.
+         """
+         if length > 50:
+             raise BadParameterError("The maximum length is 50.")
+
+         response = self.request_gql(
+             Queries.GET_LIST,
+             Queries.GET_LIST["variables"](
+                 dataset_id=dataset_id,
+                 data_list_filter=data_filter,
+                 cursor=cursor,
+                 length=length
+             )
+         )
+         data_list = [Data.model_validate(data) for data in response.get("data", [])]
+         return (
+             data_list,
+             response.get("next", None),
+             response.get("totalCount", 0)
+         )
+
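get_data_list caps the page size at 50, so reading a whole dataset means following the returned cursor until it is exhausted. A short pagination sketch (the dataset id is a placeholder):

    service = DataService()
    all_data, cursor = [], None
    while True:
        page, cursor, total = service.get_data_list(
            dataset_id="my-dataset-id",
            cursor=cursor,
            length=50,  # the maximum page size accepted above
        )
        all_data.extend(page)
        if cursor is None:  # no further pages
            break
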
+     def get_data_id_list(
+         self,
+         dataset_id: str,
+         data_filter: Optional[DataListFilter] = None,
+         cursor: Optional[str] = None,
+         length: Optional[int] = 50,
+     ):
+         """Get a page of data ids.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_filter (Optional[DataListFilter], optional): The filter for the data list. Defaults to None.
+             cursor (Optional[str], optional): The pagination cursor. Defaults to None.
+             length (Optional[int], optional): The page size. Defaults to 50.
+
+         Raises:
+             BadParameterError: If length exceeds the maximum of 500.
+
+         Returns:
+             Tuple[List[Data], Optional[str], int]: The data list (ids only), the next cursor, and the total count.
+         """
+         if length > 500:
+             raise BadParameterError("The maximum length is 500.")
+
+         response = self.request_gql(
+             Queries.GET_ID_LIST,
+             Queries.GET_ID_LIST["variables"](
+                 dataset_id=dataset_id,
+                 data_list_filter=data_filter,
+                 cursor=cursor,
+                 length=length,
+             )
+         )
+         data_ids = response.get("data", [])
+         data_list = [Data.model_validate(data_id) for data_id in data_ids]
+         return (
+             data_list,
+             response.get("next", None),
+             response.get("totalCount", 0)
+         )
+
+     def update_data(
+         self,
+         dataset_id: str,
+         data_id: str,
+         key: Union[
+             Optional[str],
+             UndefinedType
+         ] = Undefined,
+         meta: Union[
+             Optional[List[DataMeta]],
+             UndefinedType,
+         ] = Undefined,
+         system_meta: Union[
+             Optional[List[DataMeta]],
+             UndefinedType,
+         ] = Undefined,
+     ):
+         """Update a data entry.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+             key (Union[Optional[str], UndefinedType], optional): The key of the data. Defaults to Undefined.
+             meta (Union[Optional[List[DataMeta]], UndefinedType], optional): The meta of the data. Defaults to Undefined.
+             system_meta (Union[Optional[List[DataMeta]], UndefinedType], optional): The system meta of the data. Defaults to Undefined.
+
+         Raises:
+             BadParameterError: If dataset_id or data_id is not provided.
+
+         Returns:
+             Data: The updated data.
+         """
+         if dataset_id is None:
+             raise BadParameterError("dataset_id is required.")
+         if data_id is None:
+             raise BadParameterError("data_id is required.")
+
+         response = self.request_gql(
+             query=Queries.UPDATE,
+             variables=Queries.UPDATE["variables"](
+                 dataset_id=dataset_id,
+                 data_id=data_id,
+                 key=key,
+                 meta=meta,
+                 system_meta=system_meta,
+             )
+         )
+         data = Data.model_validate(response)
+         return data
+
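The Undefined sentinel lets update_data distinguish "leave this field alone" from an explicit value: only arguments that were actually passed reach the mutation. A short sketch (ids and key are placeholders):

    service = DataService()
    # Only `key` changes; meta and system_meta stay Undefined and are not sent.
    data = service.update_data(
        dataset_id="my-dataset-id",
        data_id="my-data-id",
        key="renamed-key",
    )

Passing meta=None explicitly, by contrast, would send None rather than omitting the field (presumably clearing it on the server side).
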
+     def remove_data_from_slice(
+         self,
+         dataset_id: str,
+         data_id: str,
+         slice_id: str,
+     ):
+         """Remove a data entry from a slice.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+             slice_id (str): The slice id.
+
+         Returns:
+             Data: The updated data.
+         """
+         response = self.request_gql(
+             Queries.REMOVE_FROM_SLICE,
+             Queries.REMOVE_FROM_SLICE["variables"](dataset_id=dataset_id, data_id=data_id, slice_id=slice_id)
+         )
+         data = Data.model_validate(response)
+         return data
+
+     def add_data_to_slice(
+         self,
+         dataset_id: str,
+         data_id: str,
+         slice_id: str,
+     ):
+         """Add a data entry to a slice.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+             slice_id (str): The slice id.
+
+         Returns:
+             Data: The updated data.
+         """
+         response = self.request_gql(
+             Queries.ADD_TO_SLICE,
+             Queries.ADD_TO_SLICE["variables"](dataset_id=dataset_id, data_id=data_id, slice_id=slice_id)
+         )
+         data = Data.model_validate(response)
+         return data
+
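add_data_to_slice and remove_data_from_slice are symmetric mutations that both return the updated Data, so slice membership can be toggled like this (ids are placeholders):

    service = DataService()
    data = service.add_data_to_slice(
        dataset_id="my-dataset-id", data_id="my-data-id", slice_id="my-slice-id",
    )
    # ...and later undo the membership:
    data = service.remove_data_from_slice(
        dataset_id="my-dataset-id", data_id="my-data-id", slice_id="my-slice-id",
    )
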
+     def delete_data(
+         self,
+         dataset_id: str,
+         data_id: str,
+     ):
+         """Delete a data entry.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+
+         Returns:
+             bool: True if the data is deleted, False otherwise.
+         """
+         response = self.request_gql(
+             Queries.DELETE,
+             Queries.DELETE["variables"](
+                 dataset_id=dataset_id,
+                 data_id=data_id
+             )
+         )
+         return response
+
+     def insert_prediction(
+         self,
+         dataset_id: str,
+         data_id: str,
+         prediction: Prediction,
+     ):
+         """Insert a prediction.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+             prediction (Prediction): The prediction to insert.
+
+         Returns:
+             Data: The updated data.
+         """
+         response = self.request_gql(
+             Queries.INSERT_PREDICTION,
+             Queries.INSERT_PREDICTION["variables"](
+                 dataset_id=dataset_id,
+                 data_id=data_id,
+                 prediction=prediction
+             )
+         )
+         data = Data.model_validate(response)
+         return data
+
+     def delete_prediction(
+         self,
+         dataset_id: str,
+         data_id: str,
+         set_id: str,
+     ):
+         """Delete a prediction.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+             set_id (str): The prediction set id to delete.
+
+         Returns:
+             Data: The updated data.
+         """
+         response = self.request_gql(
+             Queries.DELETE_PREDICTION,
+             Queries.DELETE_PREDICTION["variables"](
+                 dataset_id=dataset_id,
+                 data_id=data_id,
+                 set_id=set_id,
+             )
+         )
+         data = Data.model_validate(response)
+         return data
+
+     def update_annotation(
+         self,
+         dataset_id: str,
+         data_id: str,
+         meta: Union[
+             dict,
+             UndefinedType
+         ] = Undefined,
+     ):
+         """Update an annotation.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+             meta (Union[dict, UndefinedType], optional): The meta of the annotation. Defaults to Undefined.
+
+         Returns:
+             Data: The updated data.
+         """
+         response = self.request_gql(
+             Queries.UPDATE_ANNOTATION,
+             Queries.UPDATE_ANNOTATION["variables"](
+                 dataset_id=dataset_id,
+                 data_id=data_id,
+                 meta=meta,
+             )
+         )
+         data = Data.model_validate(response)
+         return data
+
+     def insert_annotation_version(
+         self,
+         dataset_id: str,
+         data_id: str,
+         version: AnnotationVersion,
+     ):
+         """Insert an annotation version.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+             version (AnnotationVersion): The annotation version to insert.
+
+         Raises:
+             BadParameterError: If dataset_id, data_id, or version is not provided.
+
+         Returns:
+             Data: The updated data.
+         """
+         if dataset_id is None:
+             raise BadParameterError("dataset_id is required.")
+         if data_id is None:
+             raise BadParameterError("data_id is required.")
+         if version is None:
+             raise BadParameterError("version is required.")
+
+         response = self.request_gql(
+             Queries.INSERT_ANNOTATION_VERSION,
+             Queries.INSERT_ANNOTATION_VERSION["variables"](
+                 dataset_id=dataset_id,
+                 data_id=data_id,
+                 version=version,
+             )
+         )
+         data = Data.model_validate(response)
+         return data
+
+     def delete_annotation_version(
+         self,
+         dataset_id: str,
+         data_id: str,
+         version_id: str,
+     ):
+         """Delete an annotation version.
+
+         Args:
+             dataset_id (str): The dataset id.
+             data_id (str): The data id.
+             version_id (str): The version id.
+
+         Returns:
+             Data: The updated data.
+         """
+         response = self.request_gql(
+             Queries.DELETE_ANNOTATION_VERSION,
+             Queries.DELETE_ANNOTATION_VERSION["variables"](
+                 dataset_id=dataset_id,
+                 data_id=data_id,
+                 version_id=version_id,
+             )
+         )
+         data = Data.model_validate(response)
+         return data
+
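An annotation version wraps uploaded content, mirroring the pattern used in create_image_data below: the payload is first stored through ContentService, then attached as a version. A hedged sketch (the payload, key, and ids are placeholders):

    service = DataService()
    content_service = ContentService()
    # Upload the annotation payload as JSON content, then register it as a new version.
    annotation_content = content_service.upload_json_content(
        {"objects": []},  # placeholder annotation payload
        key="my-data-key_annotation_v2",
    )
    version = AnnotationVersion(content=annotation_content, meta=None)
    data = service.insert_annotation_version(
        dataset_id="my-dataset-id", data_id="my-data-id", version=version,
    )
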
+     def create_image_data(
+         self,
+         dataset_id: str,
+         key: str,
+         image_content: BytesIO,
+         thumbnail_content: Optional[BytesIO] = None,
+         slice_ids: Optional[List[str]] = None,
+         annotation: Optional[dict] = None,
+         meta: Optional[dict[str, DataMetaValue]] = None,
+     ):
+         """Create image data in the dataset.
+
+         Image processing should be done by the client before calling this method.
+
+         Args:
+             dataset_id (str): The ID of the dataset to upload the image data to.
+             key (str): The key for the image data.
+             image_content (BytesIO): The pre-processed image data to upload.
+             thumbnail_content (Optional[BytesIO]): Pre-processed thumbnail data. If not provided, no thumbnail is created.
+             slice_ids (Optional[List[str]]): List of slice IDs associated with the image data.
+             annotation (Optional[dict]): Annotations associated with the image data.
+             meta (Optional[dict[str, DataMetaValue]]): Metadata of the data, e.g. { "key1": val1, "key2": val2 }.
+
+         Returns:
+             Data: The created data object.
+         """
+         content_service = ContentService()
+         data = Data(
+             dataset_id=dataset_id,
+             key=key,
+             type=DataType.SUPERB_IMAGE,
+         )
+         data.slice_ids = slice_ids
+
+         # Upload main image content
+         image_content_obj = content_service.upload_content_with_data(
+             image_content,
+             content_type="image/jpeg",
+             key=f"{key}_image",
+         )
+         scene = Scene(
+             type=SceneType.IMAGE,
+             content=image_content_obj,
+             meta=None,
+         )
+         data.scene = [scene]
+
+         # Upload thumbnail if provided
+         if thumbnail_content is not None:
+             thumbnail_content_obj = content_service.upload_content_with_data(
+                 thumbnail_content,
+                 content_type="image/jpeg",
+                 key=f"{key}_thumbnail",
+             )
+             data.thumbnail = thumbnail_content_obj
+
+         # Handle annotation if provided
+         if annotation is not None:
+             annotation_content = content_service.upload_json_content(
+                 annotation,
+                 key=f"{key}_annotation",
+             )
+             annotation_version = AnnotationVersion(
+                 content=annotation_content,
+                 meta=None
+             )
+             annotation_obj = Annotation(
+                 versions=[annotation_version],
+                 meta=None
+             )
+             data.annotation = annotation_obj
+
+         # Handle metadata if provided
+         if meta is not None:
+             data.meta = DataMeta.from_dict(meta)
+
+         response = self.request_gql(
+             Queries.CREATE,
+             Queries.CREATE["variables"](data)
+         )
+         return Data.model_validate(response)
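A minimal end-to-end sketch for create_image_data, assuming the bytes are already JPEG-encoded (the method uploads with content_type="image/jpeg"); the path, ids, and meta values are placeholders:

    from io import BytesIO

    service = DataService()
    with open("example.jpg", "rb") as f:
        image_bytes = BytesIO(f.read())

    data = service.create_image_data(
        dataset_id="my-dataset-id",
        key="example.jpg",
        image_content=image_bytes,
        meta={"split": "train"},  # converted via DataMeta.from_dict
    )
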
spb_onprem/datasets/__init__.py
@@ -0,0 +1,6 @@
+ from .service import DatasetService
+
+
+ __all__ = (
+     "DatasetService",
+ )
spb_onprem/datasets/entities/__init__.py
@@ -0,0 +1,6 @@
+ from .dataset import Dataset
+
+
+ __all__ = (
+     "Dataset",
+ )
spb_onprem/datasets/entities/dataset.py
@@ -0,0 +1,14 @@
+ from typing import Optional
+ from spb_onprem.base_model import CustomBaseModel, Field
+ from spb_onprem.data.entities.data import Data
+
+
+ class Dataset(CustomBaseModel):
+     id: Optional[str] = Field(None)
+     name: Optional[str] = Field(None)
+     description: Optional[str] = Field(None)
+
+     created_at: Optional[str] = Field(None, alias="createdAt")
+     updated_at: Optional[str] = Field(None, alias="updatedAt")
+     created_by: Optional[str] = Field(None, alias="createdBy")
+     updated_by: Optional[str] = Field(None, alias="updatedBy")
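The camelCase aliases map GraphQL response fields onto snake_case attributes. Assuming CustomBaseModel is a pydantic v2 base (model_validate is used throughout the package), a raw response validates like this:

    dataset = Dataset.model_validate({
        "id": "ds-1",
        "name": "my dataset",
        "createdAt": "2024-01-01T00:00:00Z",  # alias -> created_at
    })
    assert dataset.created_at == "2024-01-01T00:00:00Z"
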
spb_onprem/datasets/params/__init__.py
@@ -0,0 +1,11 @@
+ from .dataset import dataset_params
+ from .datasets import datasets_params
+ from .create_dataset import create_dataset_params
+ from .update_dataset import update_dataset_params
+
+ __all__ = (
+     "dataset_params",
+     "datasets_params",
+     "create_dataset_params",
+     "update_dataset_params",
+ )
spb_onprem/datasets/params/create_dataset.py
@@ -0,0 +1,32 @@
+ from typing import Union
+ from spb_onprem.base_types import Undefined, UndefinedType
+ from spb_onprem.exceptions import BadParameterError
+
+
+ def create_dataset_params(
+     name: str,
+     description: Union[
+         str,
+         UndefinedType,
+     ] = Undefined,
+ ):
+     """Create parameters for dataset creation.
+
+     Args:
+         name: Name of the dataset (required)
+         description: Optional description of the dataset
+
+     Raises:
+         BadParameterError: If name is not provided
+
+     Returns:
+         dict: Parameters for dataset creation
+     """
+     if name is None:
+         raise BadParameterError("Name is required")
+
+     params = {
+         "name": name,
+     }
+
+     if description is not Undefined:
+         params["description"] = description
+
+     return params
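Because the default is the Undefined sentinel rather than None, an omitted description never appears in the payload, while any explicitly passed value does:

    create_dataset_params(name="my dataset")
    # -> {"name": "my dataset"}

    create_dataset_params(name="my dataset", description="raw captures")
    # -> {"name": "my dataset", "description": "raw captures"}

Note that passing description=None explicitly would include "description": None, since None is not the Undefined sentinel.
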
spb_onprem/datasets/params/dataset.py
@@ -0,0 +1,26 @@
+ from typing import Optional
+ from spb_onprem.exceptions import BadParameterError
+
+
+ def dataset_params(
+     dataset_id: Optional[str] = None,
+     name: Optional[str] = None,
+ ):
+     """Get parameters for dataset lookup.
+
+     Args:
+         dataset_id: Optional dataset ID
+         name: Optional dataset name
+
+     Returns:
+         dict: Parameters for dataset lookup
+
+     Raises:
+         BadParameterError: If neither dataset_id nor name is provided
+     """
+     if dataset_id is not None:
+         return {"datasetId": dataset_id}
+     elif name is not None:
+         return {"name": name}
+     else:
+         raise BadParameterError("You must provide either id or name.")
spb_onprem/datasets/params/datasets.py
@@ -0,0 +1,53 @@
+ from typing import Optional, List, Union
+
+ from spb_onprem.base_model import CustomBaseModel, Field
+ from spb_onprem.base_types import Undefined, UndefinedType
+
+
+ class DatasetsFilterOptions(CustomBaseModel):
+     """Options for filtering datasets.
+
+     Attributes:
+         name_contains: Filter datasets by name containing this string
+         id_in: Filter datasets by list of IDs
+     """
+     name_contains: Optional[str] = Field(None, alias="nameContains")
+     id_in: Optional[List[str]] = Field(None, alias="idIn")
+
+
+ class DatasetsFilter(CustomBaseModel):
+     """Filter criteria for dataset queries.
+
+     Attributes:
+         must_filter: Conditions that must be met
+         not_filter: Conditions that must not be met
+     """
+     must_filter: Optional[DatasetsFilterOptions] = Field(None, alias="must")
+     not_filter: Optional[DatasetsFilterOptions] = Field(None, alias="not")
+
+
+ def datasets_params(
+     datasets_filter: Union[
+         DatasetsFilter,
+         UndefinedType
+     ] = Undefined,
+     cursor: Optional[str] = None,
+     length: Optional[int] = 10
+ ):
+     """Get parameters for listing datasets.
+
+     Args:
+         datasets_filter: Optional filter criteria for datasets
+         cursor: Optional cursor for pagination
+         length: Optional number of items per page (default: 10)
+
+     Returns:
+         dict: Parameters for listing datasets
+     """
+     return {
+         "filter": datasets_filter.model_dump(
+             by_alias=True, exclude_unset=True
+         ) if datasets_filter else None,
+         "cursor": cursor,
+         "length": length
+     }
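The must/not pair composes over the same option set, so "name contains X but id not in Y" is just two DatasetsFilterOptions. A short sketch of building list-query variables (assuming CustomBaseModel allows population by field name; the values are placeholders):

    params = datasets_params(
        datasets_filter=DatasetsFilter(
            must_filter=DatasetsFilterOptions(name_contains="train"),
            not_filter=DatasetsFilterOptions(id_in=["ds-old"]),
        ),
        length=20,
    )
    # params["filter"] serializes with aliases:
    # {"must": {"nameContains": "train"}, "not": {"idIn": ["ds-old"]}}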