superb-ai-onprem 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of superb-ai-onprem might be problematic.
- spb_onprem/__init__.py +74 -0
- spb_onprem/_version.py +21 -0
- spb_onprem/base_model.py +6 -0
- spb_onprem/base_service.py +164 -0
- spb_onprem/base_types.py +11 -0
- spb_onprem/contents/__init__.py +6 -0
- spb_onprem/contents/entities/__init__.py +8 -0
- spb_onprem/contents/entities/base_content.py +13 -0
- spb_onprem/contents/entities/content.py +17 -0
- spb_onprem/contents/queries.py +39 -0
- spb_onprem/contents/service.py +132 -0
- spb_onprem/data/__init__.py +6 -0
- spb_onprem/data/entities/__init__.py +15 -0
- spb_onprem/data/entities/annotation.py +25 -0
- spb_onprem/data/entities/data.py +28 -0
- spb_onprem/data/entities/data_meta.py +31 -0
- spb_onprem/data/entities/prediction.py +13 -0
- spb_onprem/data/entities/scene.py +14 -0
- spb_onprem/data/enums/__init__.py +10 -0
- spb_onprem/data/enums/data_meta_type.py +15 -0
- spb_onprem/data/enums/data_type.py +9 -0
- spb_onprem/data/enums/scene_type.py +10 -0
- spb_onprem/data/params/__init__.py +59 -0
- spb_onprem/data/params/create_data.py +68 -0
- spb_onprem/data/params/data.py +24 -0
- spb_onprem/data/params/data_list.py +96 -0
- spb_onprem/data/params/delete_annotation_version.py +20 -0
- spb_onprem/data/params/delete_data.py +17 -0
- spb_onprem/data/params/delete_prediction.py +22 -0
- spb_onprem/data/params/delete_scene.py +22 -0
- spb_onprem/data/params/insert_annotation_version.py +29 -0
- spb_onprem/data/params/insert_data_to_slice.py +22 -0
- spb_onprem/data/params/insert_prediction.py +25 -0
- spb_onprem/data/params/insert_scene.py +32 -0
- spb_onprem/data/params/remove_data_from_slice.py +22 -0
- spb_onprem/data/params/remove_data_meta.py +64 -0
- spb_onprem/data/params/update_annotation.py +30 -0
- spb_onprem/data/params/update_data.py +72 -0
- spb_onprem/data/params/update_scene.py +37 -0
- spb_onprem/data/params/upsert_data_meta.py +48 -0
- spb_onprem/data/queries.py +360 -0
- spb_onprem/data/service.py +524 -0
- spb_onprem/datasets/__init__.py +6 -0
- spb_onprem/datasets/entities/__init__.py +6 -0
- spb_onprem/datasets/entities/dataset.py +14 -0
- spb_onprem/datasets/params/__init__.py +11 -0
- spb_onprem/datasets/params/create_dataset.py +32 -0
- spb_onprem/datasets/params/dataset.py +26 -0
- spb_onprem/datasets/params/datasets.py +53 -0
- spb_onprem/datasets/params/update_dataset.py +39 -0
- spb_onprem/datasets/queries.py +79 -0
- spb_onprem/datasets/service.py +132 -0
- spb_onprem/exceptions.py +40 -0
- spb_onprem/slices/__init__.py +6 -0
- spb_onprem/slices/entities/__init__.py +5 -0
- spb_onprem/slices/entities/slice.py +17 -0
- spb_onprem/slices/params/__init__.py +23 -0
- spb_onprem/slices/params/create_slice.py +36 -0
- spb_onprem/slices/params/delete_slice.py +0 -0
- spb_onprem/slices/params/slice.py +42 -0
- spb_onprem/slices/params/slices.py +62 -0
- spb_onprem/slices/params/update_slice.py +45 -0
- spb_onprem/slices/queries.py +121 -0
- spb_onprem/slices/service.py +173 -0
- spb_onprem/users/__init__.py +0 -0
- spb_onprem/users/entities/__init__.py +5 -0
- spb_onprem/users/entities/auth.py +86 -0
- superb_ai_onprem-0.1.0.dist-info/METADATA +246 -0
- superb_ai_onprem-0.1.0.dist-info/RECORD +72 -0
- superb_ai_onprem-0.1.0.dist-info/WHEEL +5 -0
- superb_ai_onprem-0.1.0.dist-info/licenses/LICENSE +21 -0
- superb_ai_onprem-0.1.0.dist-info/top_level.txt +1 -0

spb_onprem/data/params/__init__.py
@@ -0,0 +1,59 @@
+from .data_list import (
+    AnnotationFilter,
+    DataListFilter,
+    DataFilterOptions,
+    get_data_id_list_params,
+    get_data_list_params,
+)
+from .create_data import (
+    create_params
+)
+from .update_data import (
+    update_params
+)
+from .data import (
+    get_params
+)
+from .insert_data_to_slice import (
+    insert_data_to_slice_params
+)
+from .remove_data_from_slice import (
+    remove_data_from_slice_params
+)
+from .delete_data import (
+    delete_data_params,
+)
+from .insert_prediction import (
+    insert_prediction_params,
+)
+from .delete_prediction import (
+    delete_prediction_params,
+)
+from .update_annotation import (
+    update_annotation_params,
+)
+from .insert_annotation_version import (
+    insert_annotation_version_params,
+)
+from .delete_annotation_version import (
+    delete_annotation_version_params,
+)
+
+__all__ = (
+    "AnnotationFilter",
+    "DataListFilter",
+    "DataFilterOptions",
+    "create_params",
+    "update_params",
+    "get_params",
+    "get_data_id_list_params",
+    "get_data_list_params",
+    "insert_data_to_slice_params",
+    "remove_data_from_slice_params",
+    "delete_data_params",
+    "insert_prediction_params",
+    "delete_prediction_params",
+    "update_annotation_params",
+    "insert_annotation_version_params",
+    "delete_annotation_version_params",
+)
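
The __init__.py above re-exports the param builders under spb_onprem.data.params. For orientation, a minimal usage sketch follows; the dataset id is a placeholder, and passing the filter fields by their camelCase aliases assumes CustomBaseModel keeps pydantic's default alias-based initialization.

# Illustrative sketch only: "dataset-123" is a placeholder; alias-based
# construction of the filter models is an assumption about CustomBaseModel.
from spb_onprem.data.params import (
    DataFilterOptions,
    DataListFilter,
    get_data_list_params,
)

variables = get_data_list_params(
    dataset_id="dataset-123",
    data_list_filter=DataListFilter(
        must=DataFilterOptions(keyContains="front_cam"),
    ),
    length=10,
)
# variables["filter"] now carries roughly {"must": {"keyContains": "front_cam"}},
# ready to be sent as GraphQL variables for the dataList query.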

spb_onprem/data/params/create_data.py
@@ -0,0 +1,68 @@
+from spb_onprem.data.entities import Data
+
+
+def create_params(
+    data: Data,
+):
+    """Make the variables for the createData query.
+
+    Args:
+        data (Data): The data object what you want to create.
+    """
+    return {
+        "datasetId": data.dataset_id,
+        "key": data.key,
+        "type": data.type.value,
+        "slices": data.slice_ids if data.slice_ids is not None else [],
+        "scene": [
+            {
+                "type": scene.type.value,
+                "content": {
+                    "id": scene.content.id,
+                },
+                "meta": scene.meta,
+            }
+            for scene in data.scene
+        ] if data.scene is not None else None,
+        "thumbnail": {
+            "id": data.thumbnail.id,
+        } if data.thumbnail is not None else None,
+        "annotation": {
+            "versions": [
+                {
+                    "content": {
+                        "id": version.content.id,
+                    },
+                    "meta": version.meta,
+                }
+                for version in data.annotation.versions
+            ],
+            "meta": data.annotation.meta
+        } if data.annotation is not None else None,
+        "predictions": [
+            {
+                "setId": prediction.set_id,
+                "content": {
+                    "id": prediction.content.id,
+                },
+                "meta": prediction.meta,
+            }
+            for prediction in data.predictions
+        ] if data.predictions is not None else [],
+        "meta": [
+            {
+                "key": meta.key,
+                "type": meta.type.value,
+                "value": meta.value,
+            }
+            for meta in data.meta
+        ] if data.meta is not None else None,
+        "systemMeta": [
+            {
+                "key": meta.key,
+                "type": meta.type.value,
+                "value": meta.value,
+            }
+            for meta in data.system_meta
+        ] if data.system_meta is not None else None,
+    }
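
create_params flattens a Data entity into camelCase GraphQL variables for the createData mutation. A rough sketch of calling it is below; the Data constructor fields and the DataType member lookup are assumptions, since the entity and enum definitions live in spb_onprem/data/entities and spb_onprem/data/enums, outside this hunk.

# Minimal sketch, not the package's documented API: construction by field
# name and the DataType member choice are assumptions.
from spb_onprem.data.entities import Data
from spb_onprem.data.enums import DataType
from spb_onprem.data.params import create_params

data = Data(
    dataset_id="dataset-123",        # placeholder id
    key="frame_000001.jpg",          # placeholder key
    type=list(DataType)[0],          # any DataType member; names are defined in data_type.py
)
variables = create_params(data)
# Per the code above, unset optional parts fall back to defaults:
# "slices" and "predictions" become [], while "scene", "thumbnail",
# "annotation", "meta" and "systemMeta" become None.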

spb_onprem/data/params/data.py
@@ -0,0 +1,24 @@
+from typing import Union
+from spb_onprem.base_types import Undefined, UndefinedType
+
+
+def get_params(
+    dataset_id: str,
+    data_id: Union[str, UndefinedType] = Undefined,
+    data_key: Union[str, UndefinedType] = Undefined,
+):
+    """Make the variables for the data query.
+
+    Args:
+        data_id (Union[str, UndefinedType], optional): The ID of the data. Defaults to Undefined.
+        data_key (Union[str, UndefinedType], optional): The key of the data. Defaults to Undefined.
+
+    Returns:
+        dict: The variables for the data query.
+    """
+
+    return {
+        "dataset_id": dataset_id,
+        "key": data_key,
+        "id": data_id,
+    }
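
get_params leaves unset identifiers as the Undefined sentinel rather than None, so the caller can tell "not provided" apart from an explicit null. A small sketch with placeholder values:

# Sketch of the Undefined sentinel behaviour of get_params.
from spb_onprem.base_types import Undefined
from spb_onprem.data.params import get_params

variables = get_params(dataset_id="dataset-123", data_key="frame_000001.jpg")
assert variables["key"] == "frame_000001.jpg"
assert variables["id"] is Undefined   # data_id was never supplied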

spb_onprem/data/params/data_list.py
@@ -0,0 +1,96 @@
+from typing import (
+    Optional,
+    List
+)
+from spb_onprem.base_model import CustomBaseModel, Field
+from spb_onprem.data.enums import DataType
+from spb_onprem.exceptions import BadParameterError
+
+class AnnotationFilter(CustomBaseModel):
+    type: Optional[str] = None
+    key: Optional[str] = None
+
+
+class DataFilterOptions(CustomBaseModel):
+    id_in: Optional[List[str]] = Field(None, alias="idIn")
+    slice_id: Optional[str] = Field(None, alias="sliceId")
+    slice_id_in: Optional[List[str]] = Field(None, alias="sliceIdIn")
+    key_contains: Optional[str] = Field(None, alias="keyContains")
+    key_matches: Optional[str] = Field(None, alias="keyMatches")
+    type_in: Optional[List[DataType]] = Field(None, alias="typeIn")
+    annotation_in: Optional[List[AnnotationFilter]] = Field(None, alias="annotationIn")
+    annotation_exists: Optional[bool] = Field(None, alias="annotationExists")
+    prediction_set_id_in: Optional[List[str]] = Field(None, alias="predictionSetIdIn")
+    prediction_set_id_exists: Optional[bool] = Field(None, alias="predictionSetIdExists")
+
+
+class DataListFilter(CustomBaseModel):
+    must_filter: Optional[DataFilterOptions] = Field(None, alias="must")
+    not_filter: Optional[DataFilterOptions] = Field(None, alias="not")
+
+
+def get_data_id_list_params(
+    dataset_id: str,
+    data_list_filter: Optional[DataListFilter] = None,
+    cursor: Optional[str] = None,
+    length: Optional[int] = 50,
+):
+    """Make the variables for the dataIdList query.
+
+    Args:
+        dataset_id (str): The dataset id.
+        data_list_filter (Optional[DataListFilter], optional): The filter for the data list. Defaults to None.
+        cursor (Optional[str], optional): The cursor for the data list. Defaults to None.
+        length (Optional[int], optional): The length of the data list. Defaults to 50.
+
+    Raises:
+        BadParameterError: The maximum length is 200.
+
+    Returns:
+        dict: The variables for the dataIdList query.
+    """
+    if length > 200:
+        raise BadParameterError("The maximum length is 200.")
+
+    return {
+        "dataset_id": dataset_id,
+        "filter": data_list_filter.model_dump(
+            by_alias=True, exclude_unset=True
+        ) if data_list_filter else None,
+        "cursor": cursor,
+        "length": length
+    }
+
+
+def get_data_list_params(
+    dataset_id: str,
+    data_list_filter: Optional[DataListFilter] = None,
+    cursor: Optional[str] = None,
+    length: Optional[int] = 10,
+):
+    """Make the variables for the dataList query.
+
+    Args:
+        dataset_id (str): The dataset id.
+        data_list_filter (Optional[DataListFilter], optional): The filter for the data list. Defaults to None.
+        cursor (Optional[str], optional): The cursor for the data list. Defaults to None.
+        length (Optional[int], optional): The length of the data list. Defaults to 10.
+
+    Raises:
+        BadParameterError: The maximum length is 50.
+
+    Returns:
+        dict: The variables for the dataList query.
+    """
+
+    if length > 50:
+        raise BadParameterError("The maximum length is 50.")
+
+    return {
+        "dataset_id": dataset_id,
+        "filter": data_list_filter.model_dump(
+            by_alias=True, exclude_unset=True
+        ) if data_list_filter else None,
+        "cursor": cursor,
+        "length": length
+    }
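
Both list helpers enforce a hard page-size cap before any request is built: 200 for dataIdList and 50 for dataList. A quick check of that guard, using a placeholder dataset id:

# The page-size guard raises before any network call is involved.
from spb_onprem.data.params import get_data_id_list_params, get_data_list_params
from spb_onprem.exceptions import BadParameterError

try:
    get_data_list_params(dataset_id="dataset-123", length=100)   # over the 50 cap
except BadParameterError as error:
    print(error)                       # "The maximum length is 50."

get_data_id_list_params(dataset_id="dataset-123", length=200)    # 200 is still allowed here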

spb_onprem/data/params/delete_annotation_version.py
@@ -0,0 +1,20 @@
+
+
+def delete_annotation_version_params(
+    dataset_id: str,
+    data_id: str,
+    version_id: str,
+):
+    """Delete annotation version from selected data.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be deleted
+        version_id (str): annotation version id to be deleted
+    """
+
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "id": version_id,
+    }
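
The delete-style builders are plain dict factories with snake_case keys; note that version_id is mapped to "id" in the variables. For example, with placeholder ids:

from spb_onprem.data.params import delete_annotation_version_params

variables = delete_annotation_version_params(
    dataset_id="dataset-123",
    data_id="data-456",
    version_id="version-789",
)
# -> {"dataset_id": "dataset-123", "data_id": "data-456", "id": "version-789"}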

spb_onprem/data/params/delete_data.py
@@ -0,0 +1,17 @@
+
+
+def delete_data_params(
+    dataset_id: str,
+    data_id: str,
+):
+    """Delete the data from dataset.
+
+    Args:
+        dataset_id (str): the dataset id which the data belongs to.
+        data_id (str): the data id to delete.
+    """
+
+    return {
+        "dataset_id": dataset_id,
+        "id": data_id,
+    }

spb_onprem/data/params/delete_prediction.py
@@ -0,0 +1,22 @@
+
+
+def delete_prediction_params (
+    dataset_id: str,
+    data_id: str,
+    set_id: str,
+):
+    """Delete prediction from selected data.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be deleted
+        set_id (str): set id to be deleted
+
+    Returns:
+        dict: the params for graphql query
+    """
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "set_id": set_id,
+    }

spb_onprem/data/params/delete_scene.py
@@ -0,0 +1,22 @@
+
+
+def delete_scene_params(
+    dataset_id: str,
+    data_id: str,
+    scene_id: str,
+):
+    """Delete scene from selected data.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be deleted
+        scene_id (str): scene id to be deleted
+
+    Returns:
+        dict: the params for graphql query
+    """
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "scene_id": scene_id,
+    }

spb_onprem/data/params/insert_annotation_version.py
@@ -0,0 +1,29 @@
+from spb_onprem.data.entities import AnnotationVersion
+
+
+def insert_annotation_version_params(
+    dataset_id: str,
+    data_id: str,
+    version: AnnotationVersion,
+):
+    """Insert annotation version to selected data.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be inserted
+        annotation (AnnotationVersion): annotation version to be inserted
+
+    Returns:
+        dict: the params for graphql query
+    """
+
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "version": {
+            "content": version.content.model_dump(
+                by_alias=True, exclude_unset=True
+            ),
+            "meta": version.meta
+        },
+    }

spb_onprem/data/params/insert_data_to_slice.py
@@ -0,0 +1,22 @@
+
+
+def insert_data_to_slice_params(
+    dataset_id: str,
+    data_id: str,
+    slice_id: str,
+):
+    """Insert data to selected slice.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be inserted
+        slice_id (str): slice id to be inserted
+
+    Returns:
+        dict: the params for graphql query
+    """
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "slice_id": slice_id,
+    }

spb_onprem/data/params/insert_prediction.py
@@ -0,0 +1,25 @@
+from spb_onprem.data.entities import Prediction
+
+
+def insert_prediction_params (
+    dataset_id: str,
+    data_id: str,
+    prediction: Prediction,
+):
+    """Insert prediction to selected data.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be inserted
+        prediction (Prediction): prediction to be inserted
+
+    Returns:
+        dict: the params for graphql query
+    """
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "prediction": prediction.model_dump(
+            by_alias=True, exclude_unset=True
+        ),
+    }

spb_onprem/data/params/insert_scene.py
@@ -0,0 +1,32 @@
+from spb_onprem.data.entities import Scene
+
+
+def insert_scene_params(
+    dataset_id: str,
+    data_id: str,
+    scene: Scene
+):
+    """Insert scene to selected data.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be inserted
+        scene (Scene): scene to be inserted
+
+    Returns:
+        dict: the params for graphql query
+    """
+
+    if not isinstance(scene, Scene):
+        raise ValueError("scene must be an instance of Scene.")
+    else:
+        if scene.type is None:
+            raise ValueError("scene.type must be provided.")
+
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "scene": scene.model_dump(
+            by_alias=True, exclude_unset=True
+        ),
+    }

spb_onprem/data/params/remove_data_from_slice.py
@@ -0,0 +1,22 @@
+
+
+def remove_data_from_slice_params(
+    dataset_id: str,
+    data_id: str,
+    slice_id: str,
+):
+    """Insert data to selected slice.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be removed
+        slice_id (str): slice id to be removed
+
+    Returns:
+        dict: the params for graphql query
+    """
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "slice_id": slice_id,
+    }

spb_onprem/data/params/remove_data_meta.py
@@ -0,0 +1,64 @@
+from typing import (
+    List, Union, Optional
+)
+
+from spb_onprem.base_types import (
+    Undefined,
+    UndefinedType,
+)
+from spb_onprem.data.entities import (
+    DataMeta,
+)
+
+def remove_data_meta_params(
+    dataset_id: str,
+    data_id: str,
+    meta: Union[
+        Optional[List[DataMeta]],
+        UndefinedType
+    ] = Undefined,
+    system_meta: Union[
+        Optional[List[DataMeta]],
+        UndefinedType
+    ] = Undefined,
+):
+    """Remove meta and system meta of the selected data.
+
+    Args:
+        dataset_id (str): dataset id of the data to be removed
+        data_id (str): data id to be removed
+        meta (Union[ Optional[List[DataMeta]], UndefinedType ], optional): the meta to be deleted. Defaults to Undefined.
+        system_meta (Union[ Optional[List[DataMeta]], UndefinedType ], optional): the system meta to be deleted. Defaults to Undefined.
+
+    Returns:
+        dict: the params for graphql query
+    """
+
+    variables = {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+    }
+
+    if meta is not Undefined:
+        if meta is not None and not isinstance(meta, list):
+            raise ValueError("meta must be a list of DataMeta or None.")
+        variables["meta"] = [
+            {
+                "key": item.key,
+                "type": item.type.value,
+            }
+            for item in meta
+        ] if meta is not None else None
+
+    if system_meta is not Undefined:
+        if system_meta is not None and not isinstance(system_meta, list):
+            raise ValueError("system_meta must be a list of DataMeta or None.")
+        variables["system_meta"] = [
+            {
+                "key": item.key,
+                "type": item.type.value,
+            }
+            for item in system_meta
+        ] if system_meta is not None else None
+
+    return variables

spb_onprem/data/params/update_annotation.py
@@ -0,0 +1,30 @@
+from typing import Union
+from spb_onprem.base_types import UndefinedType, Undefined
+from spb_onprem.exceptions import BadParameterError
+
+def update_annotation_params(
+    dataset_id: str,
+    data_id: str,
+    meta: Union[
+        dict,
+        UndefinedType,
+    ],
+):
+    """Make the variables for the updateAnnotation query.
+
+    Args:
+        dataset_id (str): The dataset ID of the data.
+        data_id (str): The ID of the data.
+        meta (dict): The meta of the data.
+    """
+    variables = {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+    }
+
+    if meta is not Undefined:
+        if meta is not None and not isinstance(meta, dict):
+            raise BadParameterError("meta must be a dict or None.")
+        variables["meta"] = meta
+
+    return variables

spb_onprem/data/params/update_data.py
@@ -0,0 +1,72 @@
+from typing import (
+    Union,
+    Optional,
+    List
+)
+from spb_onprem.base_types import (
+    Undefined,
+    UndefinedType,
+)
+from spb_onprem.data.entities import (
+    DataMeta,
+)
+
+
+def update_params(
+    dataset_id: str,
+    data_id: str,
+    key: Union[
+        Optional[str],
+        UndefinedType
+    ] = Undefined,
+    meta: Union[
+        Optional[List[DataMeta]],
+        UndefinedType
+    ] = Undefined,
+    system_meta: Union[
+        Optional[List[DataMeta]],
+        UndefinedType
+    ] = Undefined,
+):
+    """Make the variables for the updateData query.
+
+    Args:
+        dataset_id (str): The dataset ID of the data.
+        id (str): The ID of the data.
+        key (str): The key of the data.
+        meta (List[DataMeta]): The meta of the data.
+        system_meta (List[DataMeta]): The system meta of the data.
+    """
+    variables = {
+        "datasetId": dataset_id,
+        "id": data_id,
+    }
+
+    if key is not Undefined:
+        variables["key"] = key
+
+    if meta is not Undefined:
+        if meta is not None and not isinstance(meta, list):
+            raise ValueError("meta must be a list of DataMeta or None.")
+        variables["meta"] = [
+            {
+                "key": meta.key,
+                "type": meta.type.value,
+                "value": meta.value,
+            }
+            for meta in meta
+        ] if meta is not None else None
+
+    if system_meta is not Undefined:
+        if system_meta is not None and not isinstance(system_meta, list):
+            raise ValueError("meta must be a list of DataMeta or None.")
+        variables["systemMeta"] = [
+            {
+                "key": meta.key,
+                "type": meta.type.value,
+                "value": meta.value,
+            }
+            for meta in system_meta
+        ] if system_meta is not None else None
+
+    return variables
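
update_params follows the same Undefined convention as the other builders: parameters left at the default are omitted from the variables entirely, while an explicit None is forwarded so the field can be cleared. A sketch of the two cases, with placeholder ids:

# Undefined (the default) means "do not touch this field";
# an explicit None means "clear it". Ids are placeholders.
from spb_onprem.data.params import update_params

renamed = update_params(
    dataset_id="dataset-123",
    data_id="data-456",
    key="renamed_frame.jpg",
)
assert "meta" not in renamed            # meta stayed Undefined, so it is omitted

cleared = update_params(
    dataset_id="dataset-123",
    data_id="data-456",
    meta=None,
)
assert cleared["meta"] is None          # explicit None is passed through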

spb_onprem/data/params/update_scene.py
@@ -0,0 +1,37 @@
+from spb_onprem.data.entities import Scene
+
+
+def update_scene_params(
+    dataset_id: str,
+    data_id: str,
+    scene: Scene
+):
+    """Update scene to selected data.
+
+    Args:
+        dataset_id (str): dataset id which the data belongs to
+        data_id (str): data id to be updated
+        scene (Scene): scene to be updated
+
+    Returns:
+        dict: the params for graphql query
+    """
+
+    if not isinstance(scene, Scene):
+        raise ValueError("scene must be an instance of Scene.")
+    else:
+        if scene.id is None:
+            raise ValueError("scene.id must be provided.")
+        if scene.type is None:
+            raise ValueError("scene.type must be provided.")
+
+    return {
+        "dataset_id": dataset_id,
+        "data_id": data_id,
+        "id": scene.id,
+        "scene": {
+            "content": scene.content,
+            "type": scene.type.value,
+            "meta": scene.meta,
+        },
+    }
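
Both scene helpers validate their input before building any variables, so passing anything other than a Scene entity fails fast. A sketch of that guard with placeholder ids; update_scene_params is imported from its own module because it is not re-exported by params/__init__.py above.

# The isinstance guard rejects non-Scene values before any variables are built.
from spb_onprem.data.params.update_scene import update_scene_params

try:
    update_scene_params(
        dataset_id="dataset-123",
        data_id="data-456",
        scene={"type": "IMAGE"},        # a plain dict, not a Scene entity
    )
except ValueError as error:
    print(error)                        # "scene must be an instance of Scene."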