superb-ai-onprem 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of superb-ai-onprem has been flagged as potentially problematic; consult the registry's advisory page for details.

@@ -0,0 +1,224 @@
1
+ from typing import Optional, List, Union
2
+
3
+ from spb_onprem.base_service import BaseService
4
+ from spb_onprem.base_types import (
5
+ Undefined,
6
+ UndefinedType,
7
+ )
8
+ from spb_onprem.data.params import DataListFilter
9
+
10
+ from .entities import Export
11
+ from .params import (
12
+ ExportFilter,
13
+ )
14
+ from .queries import Queries
15
+
16
+
17
class ExportService(BaseService):
    """Service class for handling export-related operations."""

    def create_export(
        self,
        dataset_id: str,
        name: Union[UndefinedType, str] = Undefined,
        data_filter: Union[UndefinedType, DataListFilter, dict] = Undefined,
        data_count: Union[UndefinedType, int] = Undefined,
        frame_count: Union[UndefinedType, int] = Undefined,
        annotation_count: Union[UndefinedType, int] = Undefined,
        meta: Union[UndefinedType, dict] = Undefined,
    ) -> Export:
        """Create an export.

        Args:
            dataset_id (str): The ID of the dataset to create the export for.
            name (Optional[str]): The name of the export.
            data_filter (Optional[DataListFilter | dict]): The search filter of the data.
            data_count (Optional[int]): The number of data items to export.
            frame_count (Optional[int]): The number of frames to export.
            annotation_count (Optional[int]): The number of annotations to export.
            meta (Optional[dict]): The meta information for the export.

        Returns:
            Export: The created export object.
        """
        # Undefined (as opposed to None) marks parameters the caller did not
        # supply, so the query builder can omit them from the GraphQL variables.
        response = self.request_gql(
            Queries.CREATE_EXPORT,
            Queries.CREATE_EXPORT["variables"](
                dataset_id=dataset_id,
                name=name,
                data_filter=data_filter,
                data_count=data_count,
                frame_count=frame_count,
                annotation_count=annotation_count,
                meta=meta,
            )
        )
        return Export.model_validate(response)

    def get_exports(
        self,
        dataset_id: str,
        export_filter: Optional[ExportFilter] = None,
        cursor: Optional[str] = None,
        length: int = 10
    ) -> tuple[List[Export], Optional[str], int]:
        """Get exports.

        Args:
            dataset_id (str): The ID of the dataset to get exports for.
            export_filter (Optional[ExportFilter]): The filter to apply to the exports.
            cursor (Optional[str]): The cursor to use for pagination.
            length (int): The number of exports to get.

        Returns:
            tuple[List[Export], Optional[str], int]: A tuple containing the exports,
                the next cursor, and the total count of exports.
        """
        response = self.request_gql(
            Queries.GET_EXPORTS,
            Queries.GET_EXPORTS["variables"](
                dataset_id=dataset_id,
                export_filter=export_filter,
                cursor=cursor,
                length=length,
            )
        )
        exports_dict = response.get("exports", [])
        return (
            [Export.model_validate(export_dict) for export_dict in exports_dict],
            response.get("next"),
            response.get("totalCount"),
        )

    def get_export(
        self,
        dataset_id: str,
        export_id: str,
    ) -> Export:
        """Get an export.

        Args:
            dataset_id (str): The ID of the dataset to get the export for.
            export_id (str): The ID of the export to get.

        Returns:
            Export: The export object.
        """
        response = self.request_gql(
            Queries.GET_EXPORT,
            Queries.GET_EXPORT["variables"](
                dataset_id=dataset_id,
                export_id=export_id,
            )
        )
        return Export.model_validate(response)

    def update_export(
        self,
        dataset_id: str,
        export_id: str,
        location: Union[UndefinedType, str] = Undefined,
        name: Union[UndefinedType, str] = Undefined,
        data_filter: Union[UndefinedType, DataListFilter, dict] = Undefined,
        data_count: Union[UndefinedType, int] = Undefined,
        frame_count: Union[UndefinedType, int] = Undefined,
        annotation_count: Union[UndefinedType, int] = Undefined,
        meta: Union[UndefinedType, dict] = Undefined,
        completed_at: Union[UndefinedType, str] = Undefined,
    ) -> Export:
        """Update an export.

        Args:
            dataset_id (str): The ID of the dataset to update the export for.
            export_id (str): The ID of the export to update.
            location (Optional[str]): The location where the export will be stored.
            name (Optional[str]): The name of the export.
            data_filter (Optional[DataListFilter | dict]): The search filter of the data.
            data_count (Optional[int]): The number of data items to export.
            frame_count (Optional[int]): The number of frames to export.
            annotation_count (Optional[int]): The number of annotations to export.
            meta (Optional[dict]): The meta information for the export.
            completed_at (Optional[str]): The completed time of the export.

        Returns:
            Export: The updated export object.
        """
        response = self.request_gql(
            Queries.UPDATE_EXPORT,
            Queries.UPDATE_EXPORT["variables"](
                dataset_id=dataset_id,
                export_id=export_id,
                location=location,
                name=name,
                data_filter=data_filter,
                data_count=data_count,
                frame_count=frame_count,
                annotation_count=annotation_count,
                meta=meta,
                completed_at=completed_at,
            )
        )
        return Export.model_validate(response)

    def delete_export(
        self,
        dataset_id: str,
        export_id: str,
    ) -> bool:
        """Delete an export.

        Args:
            dataset_id (str): The ID of the dataset to delete the export for.
            export_id (str): The ID of the export to delete.

        Returns:
            bool: True if the export was deleted, False otherwise.
        """
        response = self.request_gql(
            Queries.DELETE_EXPORT,
            Queries.DELETE_EXPORT["variables"](
                dataset_id=dataset_id,
                export_id=export_id,
            )
        )
        return response
spb_onprem/searches.py CHANGED
@@ -16,6 +16,10 @@ from .activities.params.activities import (
16
16
  ActivitiesFilter,
17
17
  ActivitiesFilterOptions,
18
18
  )
19
+ from .exports.params.exports import (
20
+ ExportFilter,
21
+ ExportFilterOptions,
22
+ )
19
23
 
20
24
  __all__ = [
21
25
  "AnnotationFilter",
@@ -27,4 +31,6 @@ __all__ = [
27
31
  "SlicesFilterOptions",
28
32
  "ActivitiesFilter",
29
33
  "ActivitiesFilterOptions",
34
+ "ExportFilter",
35
+ "ExportFilterOptions",
30
36
  ]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: superb-ai-onprem
3
- Version: 0.2.0
3
+ Version: 0.3.0
4
4
  Summary: Python SDK for Superb AI On-premise
5
5
  Home-page: https://github.com/Superb-AI-Suite/superb-ai-onprem-python
6
6
  Author: Superb AI
@@ -1,11 +1,11 @@
1
- spb_onprem/__init__.py,sha256=X2jDAWU5Dp2lEfA6fCSHZbcwEyzMZJ0gS92Um9QnZlU,1586
2
- spb_onprem/_version.py,sha256=iB5DfB5V6YB5Wo4JmvS-txT42QtmGaWcWp3udRT7zCI,511
1
+ spb_onprem/__init__.py,sha256=Iucr68oU5pQ3lhHlyI36HUaReA7gsanGuLCSAVZh7GI,1761
2
+ spb_onprem/_version.py,sha256=AGmG_Lx0-9ztFw_7d9mYbaYuC-2abxE1oXOUNAY29YY,511
3
3
  spb_onprem/base_model.py,sha256=XLtyoxRBs68LrvbFH8V4EvQGPc2W17koC310MnS37jc,442
4
4
  spb_onprem/base_service.py,sha256=dPfr3mGXYlqadOXycu6RBFX1HcZ1qzEsskLoOxERLOU,5737
5
5
  spb_onprem/base_types.py,sha256=5HO6uy6qf08b4KSElwIaGy7UkoQG2KqVO6gcHbsqqSo,269
6
- spb_onprem/entities.py,sha256=Z3MA-W8jw-qYaCXbiMiYJxpqLUBoGVVtZpMcKsz2UJk,776
6
+ spb_onprem/entities.py,sha256=VFwaSdHIVB6NhxX5-pFRNPzCqVLNEZS-O2JETIYMopk,823
7
7
  spb_onprem/exceptions.py,sha256=jx5rTGsVZ5shOdbgQzk8GcSyMWFtb_3xhPq6Sylwc5o,478
8
- spb_onprem/searches.py,sha256=JxeVkASLFtVsJYY7lmxBoDDAXcFgrj-B6HPOT2MY2tA,620
8
+ spb_onprem/searches.py,sha256=VRj9vpvKU8iOdBJUA6BwfvOjUbWrKBo9fPJvNlRmKr0,750
9
9
  spb_onprem/activities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
10
  spb_onprem/activities/queries.py,sha256=iXuNVsJuw8yt9QNw8jEBXpUnGLD6LDC1v2_jBBgsmXs,5626
11
11
  spb_onprem/activities/service.py,sha256=DI68qkDrvpm1NW_9BzWc37kSzyedfx4xLHqMeyDjp3A,10554
@@ -37,7 +37,7 @@ spb_onprem/data/entities/prediction.py,sha256=Eb2ldNSezeYDnaLQOfC65XWoDGJ0snlvlc
37
37
  spb_onprem/data/entities/scene.py,sha256=SJgr5UnGxktyaKjU8FYDaIQmsu7xSJftJOiCgq9uSmo,446
38
38
  spb_onprem/data/enums/__init__.py,sha256=IJWaapwesyIiIjuAykZc5fXdMXK2_IiOBa7qNY5cCNk,213
39
39
  spb_onprem/data/enums/data_meta_type.py,sha256=9rd12-7C1udbbIGvnuGURKmd5-lndtW7oWQAQwKSf_Q,335
40
- spb_onprem/data/enums/data_type.py,sha256=S7sbKHtJC_pvhNxLt8xtSFO3edo0Q9c7pZ3UyEeCeVA,178
40
+ spb_onprem/data/enums/data_type.py,sha256=BXcmeuWL-y4yIAswI_zsYqH9ozEmgl6uSki7hgbOZ_g,197
41
41
  spb_onprem/data/enums/scene_type.py,sha256=ed8fAKfDk9PNG8YgYv3jI59IR9oCr_gkooexAe0448I,187
42
42
  spb_onprem/data/params/__init__.py,sha256=IApFFU6t3lHclvbivLSFdUxhrj1BjO50c3OMG6zP2iY,1311
43
43
  spb_onprem/data/params/create_data.py,sha256=8DOfbEchf9GSybiYY1cZoNzNYwILokoAkxXRFybJUAU,2038
@@ -67,6 +67,17 @@ spb_onprem/datasets/params/create_dataset.py,sha256=YGhLvY4arthjZwKQ28HLv7ch0Gd2
67
67
  spb_onprem/datasets/params/dataset.py,sha256=WTOUl5M5cc6rtTwhLw_z31Cs209LkBq8Ja4LJGzrmGE,668
68
68
  spb_onprem/datasets/params/datasets.py,sha256=Hx4YlLxfb-Qwi4Y5AFl5pyyjupvuoVcCtxLPGjIV7UY,1580
69
69
  spb_onprem/datasets/params/update_dataset.py,sha256=1oaj2qB9hvnypl4-WtcTNCa4iSuEkJjEalq2JsTm5Ro,924
70
+ spb_onprem/exports/__init__.py,sha256=l_eUjnrFJXs-vdOeMNiWYK-UdJkxArv7hxmKDG4Dfio,181
71
+ spb_onprem/exports/queries.py,sha256=sFT6VX2UwAYyVNkiBteQ_JtKYnhMrt64ww_NuXhUhLM,4084
72
+ spb_onprem/exports/service.py,sha256=xlb6QrzoaZYR5ngUEwKcsjp1IBucubPyN9NGhHGF3QM,6992
73
+ spb_onprem/exports/entities/__init__.py,sha256=_w5Qs_9Dvmy8_tweOmEpGmlMHx8m70rDSl94o7oTfmk,94
74
+ spb_onprem/exports/entities/export.py,sha256=awE2xASCarLcmxNwvjjs6CqWXKptz2M-sGE-AUf74bI,1084
75
+ spb_onprem/exports/params/__init__.py,sha256=F4X2go6V1vZ_pts5thKwW8Gal8otgv6FlLYSMDmPaMg,471
76
+ spb_onprem/exports/params/create_export.py,sha256=vC6qmGETQNQ9PIbe7ayarEe0KuBwylWupBqQOlsDD8E,2594
77
+ spb_onprem/exports/params/delete_export.py,sha256=EusUB86HNLtFYu4gIDJqZsODRETtTYhgxznjFHfxywc,664
78
+ spb_onprem/exports/params/export.py,sha256=0EP6nkQc6vFI-f8218Yq4NxfFcw8MQtHMNkYlGOXqo4,799
79
+ spb_onprem/exports/params/exports.py,sha256=oOQo-2Cqsm3Th3s-0gNcVGjbOyZqB6ujPRLXipulB4Y,2417
80
+ spb_onprem/exports/params/update_export.py,sha256=iOlZoHEN2iiY83hCZWdgTn-O9J8hcSUqd5K7_gEzvP4,3057
70
81
  spb_onprem/slices/__init__.py,sha256=xgpNGYzqgwQ8C-Bgw9AZWMAgBW38UU-U4Wube8hkodI,69
71
82
  spb_onprem/slices/queries.py,sha256=dl_q6Uc2_oeuItgSD6gUL7a3H5VrOW9Ig5Epte7sl78,2732
72
83
  spb_onprem/slices/service.py,sha256=byhB9CdxNKV7uLIiL9yI674UVSlgaAwLaYoGQGBCawE,4988
@@ -81,13 +92,19 @@ spb_onprem/slices/params/update_slice.py,sha256=kryOmCnRTQ_OU0qDKgugppLrpeUpuLwm
81
92
  spb_onprem/users/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
82
93
  spb_onprem/users/entities/__init__.py,sha256=X8HZsCTlQnuPszok3AwI-i7bsQi0Ehul5L_2jZaol5E,57
83
94
  spb_onprem/users/entities/auth.py,sha256=_KP-7yUErBxhJMm-dE3ObprPEG6e0JI2qNg6g8aK1qM,3371
84
- superb_ai_onprem-0.2.0.dist-info/licenses/LICENSE,sha256=CdinbFiHKGkGl6cPde6WgXhMuzyUXEG6tzy2-7udZ8o,1066
95
+ superb_ai_onprem-0.3.0.dist-info/licenses/LICENSE,sha256=CdinbFiHKGkGl6cPde6WgXhMuzyUXEG6tzy2-7udZ8o,1066
85
96
  tests/__init__.py,sha256=Nqnn8clbgv-5l0PgxcTOldg8mkMKrFn4TvPL-rYUUGg,1
86
97
  tests/activities/__init__.py,sha256=Nqnn8clbgv-5l0PgxcTOldg8mkMKrFn4TvPL-rYUUGg,1
87
98
  tests/activities/real_test.py,sha256=0gQHg7rIEuZndGZyNHMWSD5nUZPMsUGigfCjWFxMthQ,1786
88
99
  tests/activities/test_params.py,sha256=L3mcnrN2c8dT0AVTjnGedu6LHF3eZ4sSDzz8eaMHJmg,2369
89
100
  tests/activities/test_service.py,sha256=AbWFwjaUjQRs5oDZ2ba0C23XJxCqsFNrxH1LamMtWmU,4698
90
- superb_ai_onprem-0.2.0.dist-info/METADATA,sha256=GYkPH8YMVeOq_dEtkWRgHUOpVg6WaHLHaZLO_MmcJmE,5817
91
- superb_ai_onprem-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
92
- superb_ai_onprem-0.2.0.dist-info/top_level.txt,sha256=LbqU6FjWKaxO7FPS5-71e3OIS8VgBi5VrtQMWFOW25Q,17
93
- superb_ai_onprem-0.2.0.dist-info/RECORD,,
101
+ tests/exports/__init__.py,sha256=iBqr781TtnT1Tm5wLTCsdftpuRgQifIibxWrB_I7tiw,23
102
+ tests/exports/real_test.py,sha256=KeHWvQP-vHCvFWvD4_7qFmreAX9o_Dd_YowaGFogZu8,3765
103
+ tests/exports/test_entities.py,sha256=hG7G4kVkyHKT3mv4lvrzUqOW8ILeHiYj87QZjQcmg9E,8836
104
+ tests/exports/test_integration.py,sha256=cCcEgwIIHyQRlc04EAXSKz7RcblQvhI2GBR3uVaOOq8,6201
105
+ tests/exports/test_params.py,sha256=oRRa6nEru_FImlB3TrmFiBidz6ZstCx4rVaCK-EMGfQ,11070
106
+ tests/exports/test_service.py,sha256=IDcKxrmByh00jk9142P2ThuGureMoijTHNdU0rERGG8,14628
107
+ superb_ai_onprem-0.3.0.dist-info/METADATA,sha256=re2B_sc7Tx1NXJi3WebSt9yEiSLcie3txLgW3HQoY2A,5817
108
+ superb_ai_onprem-0.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
109
+ superb_ai_onprem-0.3.0.dist-info/top_level.txt,sha256=LbqU6FjWKaxO7FPS5-71e3OIS8VgBi5VrtQMWFOW25Q,17
110
+ superb_ai_onprem-0.3.0.dist-info/RECORD,,
@@ -0,0 +1 @@
1
+ # Export tests package
@@ -0,0 +1,130 @@
1
+ from spb_onprem import (
2
+ DatasetService,
3
+ Dataset,
4
+ ExportService,
5
+ Export,
6
+ ExportFilter,
7
+ ExportFilterOptions,
8
+ )
9
+ from spb_onprem.data.params import DataListFilter, DataFilterOptions
10
+ from spb_onprem.data.enums import DataType
11
+
12
+
13
def test_export_service(dataset_id: str = "01JPM6NR1APMBXJNC0YW72S1FN"):
    """Exercise the full export lifecycle against a live backend.

    Runs create → list (paginated) → list (filtered) → get → update →
    delete → verify-deletion against the given dataset. This is a manual
    "real" test: it requires network access and valid credentials.

    Args:
        dataset_id (str): The ID of the dataset to run the lifecycle
            against. Defaults to the original hard-coded test dataset.
    """
    # Initialize services
    dataset_service = DatasetService()
    dataset = dataset_service.get_dataset(
        dataset_id=dataset_id
    )

    print(f"Dataset: {dataset}")

    export_service = ExportService()

    # Test 1: Create an export with DataListFilter
    print("\n=== Creating Export ===")
    data_filter = DataListFilter(
        must_filter=DataFilterOptions(key_contains="validation")
    )

    new_export = export_service.create_export(
        dataset_id=dataset.id,
        name="SDK Test Export",
        data_filter=data_filter,
        meta={
            "created_by": "sdk_test",
            "purpose": "real_test"
        }
    )
    print(f"Created export: {new_export}")

    # Test 2: Get exports with pagination
    print("\n=== Getting Exports ===")
    cursor = None
    all_exports = []
    while True:
        exports, cursor, total_count = export_service.get_exports(
            dataset_id=dataset.id,
            cursor=cursor,
            length=10
        )
        all_exports.extend(exports)
        print(f"Fetched {len(exports)} exports, total: {total_count}")

        # A None cursor signals the final page.
        if cursor is None:
            break

    print(f"Total exports found: {len(all_exports)}")

    # Test 3: Get exports with filter
    print("\n=== Getting Exports with Filter ===")
    export_filter = ExportFilter(
        must_filter=ExportFilterOptions(
            name_contains="SDK Test"
        )
    )

    filtered_exports, _, filtered_count = export_service.get_exports(
        dataset_id=dataset.id,
        export_filter=export_filter,
        length=50
    )
    print(f"Filtered exports: {len(filtered_exports)}, total: {filtered_count}")

    # Test 4: Get specific export
    if all_exports:
        print("\n=== Getting Specific Export ===")
        specific_export = export_service.get_export(
            dataset_id=dataset.id,
            export_id=all_exports[0].id
        )
        print(f"Specific export: {specific_export}")

    # Test 5: Update the created export with complex DataListFilter
    print("\n=== Updating Export ===")
    complex_data_filter = DataListFilter(
        must_filter=DataFilterOptions(
            key_contains="validation",
            type_in=[DataType.SUPERB_IMAGE]
        ),
        not_filter=DataFilterOptions(
            key_contains="test"
        )
    )

    updated_export = export_service.update_export(
        dataset_id=dataset.id,
        export_id=new_export.id,
        name="SDK Test Export - Updated",
        data_filter=complex_data_filter,
        meta={
            "created_by": "sdk_test",
            "purpose": "real_test",
            "updated": True,
            "status": "completed"
        }
    )
    print(f"Updated export: {updated_export}")

    # Test 6: Delete the created export
    print("\n=== Deleting Export ===")
    delete_result = export_service.delete_export(
        dataset_id=dataset.id,
        export_id=new_export.id
    )
    print(f"Delete result: {delete_result}")

    # Test 7: Verify deletion — fetching a deleted export should fail.
    print("\n=== Verifying Deletion ===")
    try:
        deleted_export = export_service.get_export(
            dataset_id=dataset.id,
            export_id=new_export.id
        )
        print(f"Export still exists: {deleted_export}")
    except Exception as e:
        print(f"Export successfully deleted (expected error): {e}")


if __name__ == "__main__":
    test_export_service()
@@ -0,0 +1,236 @@
1
+ import pytest
2
+ from spb_onprem.exports.entities import Export
3
+ from spb_onprem.data.params import DataListFilter, DataFilterOptions
4
+
5
+
6
class TestExport:
    """Unit tests for the Export entity model."""

    def test_export_creation_minimal(self):
        # Only the two required fields are supplied.
        payload = {
            "id": "test_export_id",
            "datasetId": "test_dataset_id",
        }

        parsed = Export.model_validate(payload)

        # Required fields round-trip; every optional field defaults to None.
        assert parsed.id == "test_export_id"
        assert parsed.dataset_id == "test_dataset_id"
        for attr in (
            "name", "data_filter", "location", "data_count",
            "annotation_count", "frame_count", "meta", "created_at",
            "created_by", "updated_at", "updated_by", "completed_at",
        ):
            assert getattr(parsed, attr) is None

    def test_export_creation_full(self):
        # Every field populated via its wire-format (camelCase) name.
        payload = {
            "id": "test_export_id",
            "datasetId": "test_dataset_id",
            "name": "test_export_name",
            "dataFilter": {"must": {"keyContains": "test"}},
            "location": "s3://test-bucket/exports/",
            "dataCount": 100,
            "annotationCount": 50,
            "frameCount": 25,
            "meta": {"created_by": "test_user", "version": 1},
            "createdAt": "2024-01-01T00:00:00Z",
            "createdBy": "test_user",
            "updatedAt": "2024-01-01T12:00:00Z",
            "updatedBy": "test_user",
            "completedAt": "2024-01-01T13:00:00Z",
        }

        parsed = Export.model_validate(payload)

        assert parsed.id == "test_export_id"
        assert parsed.dataset_id == "test_dataset_id"
        assert parsed.name == "test_export_name"
        # The nested filter dict is coerced into a DataListFilter model.
        assert isinstance(parsed.data_filter, DataListFilter)
        assert parsed.data_filter.must_filter.key_contains == "test"
        assert parsed.location == "s3://test-bucket/exports/"
        assert parsed.data_count == 100
        assert parsed.annotation_count == 50
        assert parsed.frame_count == 25
        assert parsed.meta == {"created_by": "test_user", "version": 1}
        assert parsed.created_at == "2024-01-01T00:00:00Z"
        assert parsed.created_by == "test_user"
        assert parsed.updated_at == "2024-01-01T12:00:00Z"
        assert parsed.updated_by == "test_user"
        assert parsed.completed_at == "2024-01-01T13:00:00Z"

    def test_export_field_aliases(self):
        # camelCase wire names must map onto snake_case attributes.
        payload = {
            "id": "test_export_id",
            "datasetId": "test_dataset_id",
            "dataFilter": {"must": {"keyContains": "filter"}},
            "dataCount": 100,
            "annotationCount": 50,
            "frameCount": 25,
            "createdAt": "2024-01-01T00:00:00Z",
            "createdBy": "test_user",
            "updatedAt": "2024-01-01T12:00:00Z",
            "updatedBy": "test_user",
            "completedAt": "2024-01-01T13:00:00Z",
        }

        parsed = Export.model_validate(payload)

        assert parsed.dataset_id == "test_dataset_id"       # datasetId
        assert isinstance(parsed.data_filter, DataListFilter)
        assert parsed.data_filter.must_filter.key_contains == "filter"
        assert parsed.data_count == 100                     # dataCount
        assert parsed.annotation_count == 50                # annotationCount
        assert parsed.frame_count == 25                     # frameCount
        assert parsed.created_at == "2024-01-01T00:00:00Z"  # createdAt
        assert parsed.created_by == "test_user"             # createdBy
        assert parsed.updated_at == "2024-01-01T12:00:00Z"  # updatedAt
        assert parsed.updated_by == "test_user"             # updatedBy
        assert parsed.completed_at == "2024-01-01T13:00:00Z"  # completedAt

    def test_export_model_dump_with_aliases(self):
        # Build a fully-populated model directly with snake_case names.
        nested_filter = DataListFilter(
            must_filter=DataFilterOptions(key_contains="filter")
        )
        model = Export(
            id="test_export_id",
            dataset_id="test_dataset_id",
            name="test_export",
            data_filter=nested_filter,
            location="s3://test-bucket/",
            data_count=100,
            annotation_count=50,
            frame_count=25,
            meta={"test": "meta"},
            created_at="2024-01-01T00:00:00Z",
            created_by="test_user",
            updated_at="2024-01-01T12:00:00Z",
            updated_by="test_user",
            completed_at="2024-01-01T13:00:00Z",
        )

        serialized = model.model_dump(by_alias=True, exclude_unset=True)

        # Dumped keys come back in wire (camelCase) form.
        assert serialized["id"] == "test_export_id"
        assert serialized["datasetId"] == "test_dataset_id"
        assert serialized["name"] == "test_export"
        assert "dataFilter" in serialized
        assert serialized["dataFilter"]["must"]["keyContains"] == "filter"
        assert serialized["location"] == "s3://test-bucket/"
        assert serialized["dataCount"] == 100
        assert serialized["annotationCount"] == 50
        assert serialized["frameCount"] == 25
        assert serialized["meta"] == {"test": "meta"}
        assert serialized["createdAt"] == "2024-01-01T00:00:00Z"
        assert serialized["createdBy"] == "test_user"
        assert serialized["updatedAt"] == "2024-01-01T12:00:00Z"
        assert serialized["updatedBy"] == "test_user"
        assert serialized["completedAt"] == "2024-01-01T13:00:00Z"

    def test_export_required_fields_validation(self):
        # An empty payload is missing both required fields.
        with pytest.raises(Exception):  # pydantic ValidationError
            Export.model_validate({})

    def test_export_required_dataset_id_validation(self):
        # dataset_id is required; id alone is not enough.
        with pytest.raises(Exception):  # pydantic ValidationError
            Export.model_validate({"id": "test_export_id"})

    def test_export_optional_fields_none(self):
        # Explicit nulls for every optional field must be accepted.
        payload = {
            "id": "test_export_id",
            "datasetId": "test_dataset_id",
            "name": None,
            "dataFilter": None,
            "location": None,
            "dataCount": None,
            "annotationCount": None,
            "frameCount": None,
            "meta": None,
            "createdAt": None,
            "createdBy": None,
            "updatedAt": None,
            "updatedBy": None,
            "completedAt": None,
        }

        parsed = Export.model_validate(payload)

        for attr in (
            "name", "data_filter", "location", "data_count",
            "annotation_count", "frame_count", "meta", "created_at",
            "created_by", "updated_at", "updated_by", "completed_at",
        ):
            assert getattr(parsed, attr) is None

    def test_export_equality(self):
        make = lambda export_id: Export(
            id=export_id,
            dataset_id="test_dataset_id",
            name="test_export",
        )
        first = make("test_export_id")
        second = make("test_export_id")
        third = make("different_export_id")

        assert first == second   # identical field values compare equal
        assert first != third    # a differing id breaks equality

    def test_export_string_representation(self):
        model = Export(
            id="test_export_id",
            dataset_id="test_dataset_id",
            name="test_export",
        )

        rendered = str(model)

        # Both identifying fields should appear in the string form.
        assert "test_export_id" in rendered
        assert "test_dataset_id" in rendered