superb-ai-onprem 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of superb-ai-onprem might be problematic. Click here for more details.

Files changed (72):
  1. spb_onprem/__init__.py +74 -0
  2. spb_onprem/_version.py +21 -0
  3. spb_onprem/base_model.py +6 -0
  4. spb_onprem/base_service.py +164 -0
  5. spb_onprem/base_types.py +11 -0
  6. spb_onprem/contents/__init__.py +6 -0
  7. spb_onprem/contents/entities/__init__.py +8 -0
  8. spb_onprem/contents/entities/base_content.py +13 -0
  9. spb_onprem/contents/entities/content.py +17 -0
  10. spb_onprem/contents/queries.py +39 -0
  11. spb_onprem/contents/service.py +132 -0
  12. spb_onprem/data/__init__.py +6 -0
  13. spb_onprem/data/entities/__init__.py +15 -0
  14. spb_onprem/data/entities/annotation.py +25 -0
  15. spb_onprem/data/entities/data.py +28 -0
  16. spb_onprem/data/entities/data_meta.py +31 -0
  17. spb_onprem/data/entities/prediction.py +13 -0
  18. spb_onprem/data/entities/scene.py +14 -0
  19. spb_onprem/data/enums/__init__.py +10 -0
  20. spb_onprem/data/enums/data_meta_type.py +15 -0
  21. spb_onprem/data/enums/data_type.py +9 -0
  22. spb_onprem/data/enums/scene_type.py +10 -0
  23. spb_onprem/data/params/__init__.py +59 -0
  24. spb_onprem/data/params/create_data.py +68 -0
  25. spb_onprem/data/params/data.py +24 -0
  26. spb_onprem/data/params/data_list.py +96 -0
  27. spb_onprem/data/params/delete_annotation_version.py +20 -0
  28. spb_onprem/data/params/delete_data.py +17 -0
  29. spb_onprem/data/params/delete_prediction.py +22 -0
  30. spb_onprem/data/params/delete_scene.py +22 -0
  31. spb_onprem/data/params/insert_annotation_version.py +29 -0
  32. spb_onprem/data/params/insert_data_to_slice.py +22 -0
  33. spb_onprem/data/params/insert_prediction.py +25 -0
  34. spb_onprem/data/params/insert_scene.py +32 -0
  35. spb_onprem/data/params/remove_data_from_slice.py +22 -0
  36. spb_onprem/data/params/remove_data_meta.py +64 -0
  37. spb_onprem/data/params/update_annotation.py +30 -0
  38. spb_onprem/data/params/update_data.py +72 -0
  39. spb_onprem/data/params/update_scene.py +37 -0
  40. spb_onprem/data/params/upsert_data_meta.py +48 -0
  41. spb_onprem/data/queries.py +360 -0
  42. spb_onprem/data/service.py +524 -0
  43. spb_onprem/datasets/__init__.py +6 -0
  44. spb_onprem/datasets/entities/__init__.py +6 -0
  45. spb_onprem/datasets/entities/dataset.py +14 -0
  46. spb_onprem/datasets/params/__init__.py +11 -0
  47. spb_onprem/datasets/params/create_dataset.py +32 -0
  48. spb_onprem/datasets/params/dataset.py +26 -0
  49. spb_onprem/datasets/params/datasets.py +53 -0
  50. spb_onprem/datasets/params/update_dataset.py +39 -0
  51. spb_onprem/datasets/queries.py +79 -0
  52. spb_onprem/datasets/service.py +132 -0
  53. spb_onprem/exceptions.py +40 -0
  54. spb_onprem/slices/__init__.py +6 -0
  55. spb_onprem/slices/entities/__init__.py +5 -0
  56. spb_onprem/slices/entities/slice.py +17 -0
  57. spb_onprem/slices/params/__init__.py +23 -0
  58. spb_onprem/slices/params/create_slice.py +36 -0
  59. spb_onprem/slices/params/delete_slice.py +0 -0
  60. spb_onprem/slices/params/slice.py +42 -0
  61. spb_onprem/slices/params/slices.py +62 -0
  62. spb_onprem/slices/params/update_slice.py +45 -0
  63. spb_onprem/slices/queries.py +121 -0
  64. spb_onprem/slices/service.py +173 -0
  65. spb_onprem/users/__init__.py +0 -0
  66. spb_onprem/users/entities/__init__.py +5 -0
  67. spb_onprem/users/entities/auth.py +86 -0
  68. superb_ai_onprem-0.1.0.dist-info/METADATA +246 -0
  69. superb_ai_onprem-0.1.0.dist-info/RECORD +72 -0
  70. superb_ai_onprem-0.1.0.dist-info/WHEEL +5 -0
  71. superb_ai_onprem-0.1.0.dist-info/licenses/LICENSE +21 -0
  72. superb_ai_onprem-0.1.0.dist-info/top_level.txt +1 -0
spb_onprem/__init__.py ADDED
@@ -0,0 +1,74 @@
1
+ try:
2
+ from ._version import version as __version__
3
+ except ImportError:
4
+ __version__ = "0.0.0.dev0"
5
+
6
+ # Services
7
+ from .datasets.service import DatasetService
8
+ from .data.service import DataService
9
+ from .slices.service import SliceService
10
+
11
+ # Core Entities
12
+ from .data.entities import (
13
+ Data,
14
+ Scene,
15
+ Annotation,
16
+ AnnotationVersion,
17
+ Prediction,
18
+ DataMeta,
19
+ )
20
+ from .datasets.entities import Dataset
21
+ from .slices.entities import Slice
22
+
23
+ # Enums
24
+ from .data.enums import (
25
+ DataType,
26
+ SceneType,
27
+ DataMetaTypes,
28
+ DataMetaValue,
29
+ )
30
+
31
+ # Filters
32
+ from .data.params.data_list import (
33
+ AnnotationFilter,
34
+ DataListFilter,
35
+ DataFilterOptions,
36
+ )
37
+ from .datasets.params.datasets import (
38
+ DatasetsFilter,
39
+ DatasetsFilterOptions,
40
+ )
41
+ from .slices.params.slices import (
42
+ SlicesFilterOptions,
43
+ )
44
+
45
+ __all__ = (
46
+ # Services
47
+ "DatasetService",
48
+ "DataService",
49
+ "SliceService",
50
+
51
+ # Core Entities
52
+ "Data",
53
+ "Scene",
54
+ "Annotation",
55
+ "AnnotationVersion",
56
+ "Prediction",
57
+ "DataMeta",
58
+ "Dataset",
59
+ "Slice",
60
+
61
+ # Enums
62
+ "DataType",
63
+ "SceneType",
64
+ "DataMetaTypes",
65
+ "DataMetaValue",
66
+
67
+ # Filters
68
+ "AnnotationFilter",
69
+ "DataListFilter",
70
+ "DataFilterOptions",
71
+ "DatasetsFilter",
72
+ "DatasetsFilterOptions",
73
+ "SlicesFilterOptions",
74
+ )
spb_onprem/_version.py ADDED
@@ -0,0 +1,21 @@
1
+ # file generated by setuptools-scm
2
+ # don't change, don't track in version control
3
+
4
+ __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
5
+
6
+ TYPE_CHECKING = False
7
+ if TYPE_CHECKING:
8
+ from typing import Tuple
9
+ from typing import Union
10
+
11
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
12
+ else:
13
+ VERSION_TUPLE = object
14
+
15
+ version: str
16
+ __version__: str
17
+ __version_tuple__: VERSION_TUPLE
18
+ version_tuple: VERSION_TUPLE
19
+
20
+ __version__ = version = '0.1.0'
21
+ __version_tuple__ = version_tuple = (0, 1, 0)
@@ -0,0 +1,6 @@
1
+ from pydantic import BaseModel, Field
2
+
3
+
4
class CustomBaseModel(BaseModel):
    """Project-wide pydantic base model.

    Enables population by field name in addition to alias, so camelCase
    API payloads (declared via ``Field(alias=...)``) map onto the
    snake_case attributes used throughout the SDK.
    """
    class Config:
        populate_by_name = True
@@ -0,0 +1,164 @@
1
+ from typing import Optional, Dict, Any, ClassVar
2
+ import random
3
+
4
+ import requests
5
+ from requests.adapters import HTTPAdapter
6
+ from urllib3.util import Retry
7
+
8
+ from spb_onprem.users.entities import AuthUser
9
+ from spb_onprem.exceptions import (
10
+ NotFoundError,
11
+ UnknownError,
12
+ BadResponseError,
13
+ BadRequestError,
14
+ BaseSDKError,
15
+ BadRequestParameterError,
16
+ RequestError,
17
+ ResponseError,
18
+ )
19
+
20
class RetryWithJitter(Retry):
    """Retry policy that randomizes the backoff delay.

    Scales the standard exponential backoff by a random factor in
    [0.5, 1.5) so that many clients retrying simultaneously do not hit
    the server in lockstep (thundering-herd mitigation).
    """

    def get_backoff_time(self):
        # Apply a multiplicative jitter on top of the parent's backoff.
        return super().get_backoff_time() * random.uniform(0.5, 1.5)
25
+
26
+
27
class BaseService:
    """Base class for services that talk to the on-prem GraphQL backend.

    Provides a single retry-enabled ``requests`` session shared by all
    service instances, plus helpers for GraphQL (``request_gql``) and
    plain HTTP (``request``) calls authenticated as the current
    :class:`AuthUser`.
    """

    # One retry-enabled session cached for the whole process.
    _retry_session: ClassVar[Optional[requests.Session]] = None
    _auth_user: Optional[AuthUser] = None

    def __init__(self):
        self._auth_user = AuthUser.get_instance()
        self.endpoint = f"{self._auth_user.host}/graphql/"

    @classmethod
    def requests_retry_session(
        cls,
        retries=5,
        backoff_factor=2,
        status_forcelist=(500, 502, 504),
        session=None,
        # Immutable default: a mutable list default would be shared
        # across calls.
        allowed_methods=(
            'GET', 'POST', 'PUT', 'DELETE', 'OPTIONS',
            'HEAD', 'PATCH', 'TRACE', 'CONNECT',
        ),
    ) -> requests.Session:
        """Return the shared session, configuring retries on first use.

        Args:
            retries: Maximum attempts for total/read/connect retries.
            backoff_factor: Base factor for exponential backoff.
            status_forcelist: HTTP status codes that trigger a retry.
            session: Optional pre-built session to configure and cache.
            allowed_methods: HTTP methods eligible for retries.

        Note:
            The arguments only take effect on the first call; afterwards
            the cached session is returned unchanged.
        """
        if BaseService._retry_session is None:
            session = session or requests.Session()
            retry = RetryWithJitter(
                total=retries,
                read=retries,
                connect=retries,
                backoff_factor=backoff_factor,
                status_forcelist=status_forcelist,
                allowed_methods=frozenset(allowed_methods),
            )
            adapter = HTTPAdapter(max_retries=retry)
            session.mount('http://', adapter)
            session.mount('https://', adapter)
            BaseService._retry_session = session
        return BaseService._retry_session

    def request_gql(self, query: Any, variables: Dict[str, Any]):
        """Execute a GraphQL query/mutation and return its payload.

        Args:
            query: Dict with ``"name"`` (response field to extract) and
                ``"query"`` (GraphQL document text).
            variables: GraphQL variables for the request.

        Returns:
            The value under ``result["data"][query["name"]]``.

        Raises:
            NotFoundError: When the server reports a NOT_FOUND error.
            UnknownError: For any other GraphQL-level error.
            BadRequestError: When the response body is not a JSON object.
            BadResponseError: On transport failure or malformed response
                structure.
            ResponseError: For any other unexpected failure.
        """
        payload = {
            "query": query["query"],
            "variables": variables
        }

        # Shared cached session. BUGFIX: do NOT close it after the call —
        # the previous ``finally: session.close()`` closed the class-level
        # cached session after every request, defeating the cache and
        # connection keep-alive.
        session = self.requests_retry_session()

        try:
            response = session.post(
                self.endpoint,
                json=payload,
                headers=self._auth_user.auth_headers
            )
            response.raise_for_status()

            result = response.json()
            if not isinstance(result, dict):
                raise BadRequestError(f"Invalid response format: {type(result).__name__}, expected dict")

            # Check for GraphQL errors.
            if result.get('errors'):
                for error in result['errors']:
                    # BUGFIX: 'code' is an optional extension field on
                    # GraphQL errors; ``.get`` avoids a KeyError when a
                    # server error does not carry it.
                    if error.get('code') == 'NOT_FOUND':
                        raise NotFoundError(error.get('message', 'Not found'))
                error_messages = [error.get('message', 'Unknown error') for error in result['errors']]
                raise UnknownError(f"GraphQL errors: {', '.join(error_messages)}")

            # Validate response structure.
            if 'data' not in result:
                raise BadResponseError("Missing 'data' field in response")

            query_name = query.get("name")
            if not query_name:
                raise BadResponseError("Missing query name in query object")

            data = result['data']
            if query_name not in data:
                raise BadResponseError(f"Missing '{query_name}' in response data")

            return data[query_name]

        except requests.exceptions.RequestException as e:
            raise BadResponseError(f"HTTP request failed: {str(e)}") from e
        except BaseSDKError:
            # SDK errors raised above pass through unchanged.
            raise
        except Exception as e:
            raise ResponseError(f"Unexpected error: {str(e)}") from e

    def request(
        self,
        method: str,
        url: str,
        headers: Optional[dict] = None,
        params: Optional[dict] = None,
        data: Optional[dict] = None,
        json_data: Optional[dict] = None,
        timeout: int = 30
    ):
        """Perform a plain (non-GraphQL) HTTP request with auth headers.

        Args:
            method: HTTP method name (case-insensitive).
            url: Target URL.
            headers: Extra request headers, merged under the auth headers.
            params: Query-string parameters.
            data: Raw/form body data.
            json_data: JSON body (sent via ``json=``).
            timeout: Request timeout in seconds.

        Returns:
            The ``requests.Response`` on success (2xx).

        Raises:
            BadRequestError: On transport/HTTP errors.
            BadRequestParameterError: When the response cannot be parsed.
            RequestError: On any other unexpected failure.
        """
        session = self.requests_retry_session()
        try:
            response = session.request(
                method=method.upper(),
                url=url,
                # BUGFIX: ``headers`` defaults to None; guard before
                # unpacking to avoid a TypeError.
                headers={
                    **(headers or {}),
                    **self._auth_user.auth_headers
                },
                params=params,
                data=data,
                json=json_data,
                timeout=timeout
            )
            response.raise_for_status()
            return response

        except requests.exceptions.RequestException as e:
            raise BadRequestError(f"HTTP request failed: {str(e)}") from e
        except ValueError as e:
            # BUGFIX: the exception must be bound (``as e``) for ``from e``.
            raise BadRequestParameterError("Failed to parse the HTTP response as JSON.") from e
        except Exception as e:
            raise RequestError(f"An error occurred while processing the HTTP response: {str(e)}") from e
        # NOTE(review): the shared session is intentionally left open here
        # as well — it is cached at class level and reused.
@@ -0,0 +1,11 @@
1
+
2
+
3
class UndefinedType:
    """A singleton sentinel type representing "no value provided".

    Distinct from ``None`` so callers can tell "explicitly set to None"
    apart from "not given at all". Construction is enforced to return a
    single shared instance, so identity checks (``x is Undefined``) used
    throughout the SDK remain correct even if ``UndefinedType()`` is
    called again.
    """

    _instance = None

    def __new__(cls):
        # BUGFIX: the docstring promised a singleton but nothing enforced
        # it; a second instance would break ``is Undefined`` checks.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __repr__(self):
        return "Undefined"

    def __bool__(self):
        return False  # Ensures it evaluates as False in boolean contexts


Undefined = UndefinedType()
@@ -0,0 +1,6 @@
1
+ from .service import ContentService
2
+
3
+
4
+ __all__ = (
5
+ "ContentService",
6
+ )
@@ -0,0 +1,8 @@
1
+ from .base_content import BaseContent
2
+ from .content import Content
3
+
4
+
5
+ __all__ = (
6
+ "BaseContent",
7
+ "Content",
8
+ )
@@ -0,0 +1,13 @@
1
+ from typing import Optional
2
+
3
+ from spb_onprem.base_model import CustomBaseModel, Field
4
+
5
class BaseContent(CustomBaseModel):
    """Minimal view of a stored content object.

    Represents the actual file kept in the file storage, exposing only
    its identifier and a download URL.
    """

    id: str
    download_url: Optional[str] = Field(None, alias="downloadURL")
@@ -0,0 +1,17 @@
1
+ from typing import Optional
2
+
3
+ from spb_onprem.base_model import CustomBaseModel, Field
4
+
5
class Content(CustomBaseModel):
    """Full record of a stored content object.

    Extends the identifier/download-URL pair with storage metadata:
    the object key, its location descriptor, and creation audit fields.
    """

    id: str
    download_url: Optional[str] = Field(None, alias="downloadURL")
    key: Optional[str] = None
    location: Optional[dict] = None
    created_at: Optional[str] = Field(None, alias="createdAt")
    created_by: Optional[str] = Field(None, alias="createdBy")
@@ -0,0 +1,39 @@
1
+ from typing import (
2
+ Union,
3
+ )
4
+
5
+ from spb_onprem.base_types import (
6
+ Undefined,
7
+ UndefinedType,
8
+ )
9
+
10
class Queries:
    """GraphQL queries and mutations for content operations."""

    @staticmethod
    def create_variables(
        key: Union[str, UndefinedType] = Undefined,
    ):
        # Omit the key entirely when the caller did not provide one, so
        # the server applies its own default instead of receiving null.
        return {"key": key} if key is not Undefined else {}

    CREATE = {
        "name": "createContent",
        "query": '''
            mutation CreateContent($key: String) {
                createContent(key: $key) {
                    content {
                        id
                        key
                        location
                        createdAt
                        createdBy
                    }
                    uploadURL
                }
            }
        ''',
        "variables": create_variables
    }
@@ -0,0 +1,132 @@
1
+ import mimetypes
2
+ import requests
3
+ import json
4
+
5
+ from io import BytesIO
6
+ from typing import Optional, Union
7
+
8
+ from spb_onprem.base_service import BaseService
9
+ from spb_onprem.base_types import (
10
+ Undefined,
11
+ UndefinedType,
12
+ )
13
+ from .entities import Content, BaseContent
14
+ from .queries import Queries
15
+
16
+
17
+
18
class ContentService(BaseService):
    """The content service for the SDK.

    Handles content (file) operations: each upload first creates a
    content record via GraphQL, then PUTs the bytes to the pre-signed
    upload URL returned by the server.
    """

    def _create_content(
        self,
        key: Union[str, UndefinedType] = Undefined,
    ):
        """Create the content record; return ``(content_dict, upload_url)``."""
        response = self.request_gql(
            query=Queries.CREATE,
            variables=Queries.CREATE["variables"](key),
        )
        return response['content'], response['uploadURL']

    def upload_content(
        self,
        file_path: str,
        key: Union[
            str,
            UndefinedType
        ] = Undefined,
    ):
        '''
        Uploads the content to the server.

        Args:
            file_path (str):
                The path of the file to be uploaded.
                You must provide the full path of the file (with extensions).
            key (Optional[str]):
                An optional key to associate with the uploaded content.

        Returns:
            BaseContent: The created content entity.
        '''
        # Read first, then release the handle — don't hold the file open
        # across network I/O.
        with open(file_path, 'rb') as f:
            file = f.read()

        content, upload_url = self._create_content(key)
        self.request(
            method="PUT",
            url=upload_url,
            headers={
                # guess_type may return None for unknown extensions; the
                # storage backend then falls back to its default type.
                'Content-Type': mimetypes.guess_type(file_path)[0]
            },
            data=file,
        )
        return BaseContent.model_validate(content)

    def upload_json_content(
        self,
        data: dict,
        key: Union[
            str,
            UndefinedType
        ] = Undefined,
    ):
        '''
        Uploads the JSON content to the server.

        Args:
            data (dict):
                The JSON data to be uploaded.
            key (Optional[str]):
                An optional key to associate with the uploaded content.

        Returns:
            BaseContent: The created content entity.
        '''
        # BUGFIX: previously passed ``variables=None`` whenever key was
        # falsy (``... if key else None``). Always build the variables
        # dict (empty for Undefined), consistent with the other uploads.
        content, upload_url = self._create_content(key)
        self.request(
            method="PUT",
            url=upload_url,
            headers={
                'Content-Type': 'application/json'
            },
            json_data=data,
        )
        return BaseContent.model_validate(content)

    def upload_content_with_data(
        self,
        file_data: BytesIO,
        content_type: str,
        key: Union[
            str,
            UndefinedType
        ] = Undefined,
    ):
        '''
        Uploads in-memory content to the server.

        Args:
            file_data (BytesIO):
                The file data to be uploaded.
            content_type (str):
                The MIME type of the file being uploaded (e.g., "image/jpeg").
            key (Optional[str]):
                An optional key to associate with the uploaded content.

        Returns:
            BaseContent: The created content entity.
        '''
        # Rewind so the whole buffer is uploaded even if it was read before.
        file_data.seek(0)

        content, upload_url = self._create_content(key)
        self.request(
            method="PUT",
            url=upload_url,
            headers={
                'Content-Type': content_type
            },
            data=file_data.read(),
        )
        return BaseContent.model_validate(content)
@@ -0,0 +1,6 @@
1
+ from .service import DataService
2
+
3
+
4
+ __all__ = (
5
+ "DataService",
6
+ )
@@ -0,0 +1,15 @@
1
+ from .annotation import Annotation, AnnotationVersion
2
+ from .data_meta import DataMeta
3
+ from .data import Data
4
+ from .prediction import Prediction
5
+ from .scene import Scene
6
+
7
+
8
+ __all__ = (
9
+ "Data",
10
+ "Scene",
11
+ "Annotation",
12
+ "AnnotationVersion",
13
+ "Prediction",
14
+ "DataMeta",
15
+ )
@@ -0,0 +1,25 @@
1
+ from typing import Optional, List
2
+
3
+ from spb_onprem.base_model import CustomBaseModel
4
+ from spb_onprem.contents.entities import BaseContent
5
+
6
+
7
class AnnotationVersion(CustomBaseModel):
    """A single version of a data annotation.

    Holds the annotation payload as a content reference, together with
    free-form metadata for that version.
    """

    id: Optional[str] = None
    content: Optional[BaseContent] = None
    meta: Optional[dict] = None
16
+
17
+
18
class Annotation(CustomBaseModel):
    """The annotation of a data item, holding its version history."""

    versions: Optional[List[AnnotationVersion]] = None
    meta: Optional[dict] = None
25
+
@@ -0,0 +1,28 @@
1
+ from typing import List, Optional
2
+ from spb_onprem.base_model import CustomBaseModel, Field
3
+ from spb_onprem.data.enums import DataType
4
+ from .scene import Scene
5
+ from .annotation import Annotation
6
+ from .prediction import Prediction
7
+ from .data_meta import DataMeta
8
+ from spb_onprem.contents.entities import BaseContent
9
+
10
class Data(CustomBaseModel):
    """A single data entry in a dataset.

    Aggregates the raw scenes, thumbnail, annotation, predictions and
    metadata of one item, along with audit fields. Aliased fields map
    the backend's camelCase keys onto snake_case attributes.
    """

    id: Optional[str] = None
    dataset_id: Optional[str] = Field(None, alias="datasetId")
    slice_ids: Optional[List[str]] = Field(None, alias="sliceIds")
    key: Optional[str] = None
    type: Optional[DataType] = None
    scene: Optional[List[Scene]] = None
    thumbnail: Optional[BaseContent] = None
    annotation: Optional[Annotation] = None
    predictions: Optional[List[Prediction]] = None
    meta: Optional[List[DataMeta]] = None
    system_meta: Optional[List[DataMeta]] = Field(None, alias="systemMeta")
    created_at: Optional[str] = Field(None, alias="createdAt")
    created_by: Optional[str] = Field(None, alias="createdBy")
    updated_at: Optional[str] = Field(None, alias="updatedAt")
    updated_by: Optional[str] = Field(None, alias="updatedBy")
@@ -0,0 +1,31 @@
1
+ from typing import Optional, Union
2
+ from datetime import datetime
3
+ from spb_onprem.data.enums import DataMetaTypes, DataMetaValue
4
+ from spb_onprem.base_model import CustomBaseModel
5
+
6
+
7
class DataMeta(CustomBaseModel):
    """A metadata entry attached to a data item (key / type / value)."""

    key: Optional[str] = None
    type: Optional[DataMetaTypes] = None
    value: Optional[DataMetaValue] = None

    @staticmethod
    def _infer_type(value: DataMetaValue) -> DataMetaTypes:
        """Map a raw Python value onto its DataMetaTypes bucket."""
        # bool must be tested before int/float: bool is an int subclass.
        if isinstance(value, bool):
            return DataMetaTypes.BOOLEAN
        if isinstance(value, (int, float)):
            return DataMetaTypes.NUMBER
        if isinstance(value, datetime):
            return DataMetaTypes.DATETIME
        if isinstance(value, str):
            return DataMetaTypes.STRING
        # Anything else (list or dict payloads) is annotation-shaped.
        return DataMetaTypes.ANNOTATION

    @classmethod
    def from_dict(cls, meta: dict[str, DataMetaValue]) -> list["DataMeta"]:
        """Build DataMeta entries from a plain ``{key: value}`` mapping."""
        return [
            cls(key=key, value=val, type=cls._infer_type(val))
            for key, val in meta.items()
        ]
@@ -0,0 +1,13 @@
1
+ from typing import Optional
2
+ from spb_onprem.base_model import CustomBaseModel, Field
3
+ from spb_onprem.contents.entities import BaseContent
4
+
5
+
6
class Prediction(CustomBaseModel):
    """A model prediction attached to a data item.

    Identified by the prediction set it belongs to, with the prediction
    payload stored as a content reference.
    """

    set_id: Optional[str] = Field(None, alias="setId")
    content: Optional[BaseContent] = None
    meta: Optional[dict] = None
@@ -0,0 +1,14 @@
1
+ from typing import Optional
2
+ from spb_onprem.base_model import CustomBaseModel
3
+ from spb_onprem.data.enums import SceneType
4
+ from spb_onprem.contents.entities import BaseContent
5
+
6
class Scene(CustomBaseModel):
    """A scene of a data item.

    The scene is the representation of the underlying file of the data,
    typed by :class:`SceneType` and backed by a content reference.
    """

    id: Optional[str] = None
    type: Optional[SceneType] = None
    content: Optional[BaseContent] = None
    meta: Optional[dict] = None
@@ -0,0 +1,10 @@
1
+ from .data_meta_type import DataMetaTypes, DataMetaValue
2
+ from .data_type import DataType
3
+ from .scene_type import SceneType
4
+
5
+ __all__ = (
6
+ "DataType",
7
+ "SceneType",
8
+ "DataMetaTypes",
9
+ "DataMetaValue",
10
+ )
@@ -0,0 +1,15 @@
1
+ from enum import Enum
2
+ from typing import Union
3
+ from datetime import datetime
4
+
5
+
6
class DataMetaTypes(str, Enum):
    """Discriminator for the value type carried by a data-meta entry."""

    STRING = "String"
    NUMBER = "Number"
    BOOLEAN = "Boolean"
    DATETIME = "DateTime"
    ANNOTATION = "Annotation"


# Any value a data-meta entry may carry at runtime.
DataMetaValue = Union[str, int, float, bool, datetime, dict, list]
@@ -0,0 +1,9 @@
1
+ from enum import Enum
2
+
3
+
4
class DataType(str, Enum):
    """Enumeration of supported data kinds.

    Used to determine how a data item's payload should be interpreted.
    """

    SUPERB_IMAGE = "SUPERB_IMAGE"