superb-ai-onprem 0.1.6__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their respective registries. It is provided for informational purposes only.
Potentially problematic release: this version of superb-ai-onprem might be problematic.
- spb_onprem/__init__.py +26 -3
- spb_onprem/_version.py +2 -2
- spb_onprem/activities/__init__.py +0 -0
- spb_onprem/activities/entities/__init__.py +10 -0
- spb_onprem/activities/entities/activity.py +39 -0
- spb_onprem/activities/entities/activity_history.py +31 -0
- spb_onprem/activities/params/__init__.py +23 -0
- spb_onprem/activities/params/activities.py +53 -0
- spb_onprem/activities/params/activity.py +42 -0
- spb_onprem/activities/params/create_activity.py +80 -0
- spb_onprem/activities/params/delete_activity.py +23 -0
- spb_onprem/activities/params/start_activity.py +59 -0
- spb_onprem/activities/params/update_activity.py +79 -0
- spb_onprem/activities/params/update_activity_history.py +54 -0
- spb_onprem/activities/queries.py +226 -0
- spb_onprem/activities/service.py +315 -0
- spb_onprem/base_model.py +2 -3
- spb_onprem/data/enums/data_type.py +2 -1
- spb_onprem/entities.py +15 -1
- spb_onprem/exports/__init__.py +9 -0
- spb_onprem/exports/entities/__init__.py +7 -0
- spb_onprem/exports/entities/export.py +27 -0
- spb_onprem/exports/params/__init__.py +19 -0
- spb_onprem/exports/params/create_export.py +85 -0
- spb_onprem/exports/params/delete_export.py +30 -0
- spb_onprem/exports/params/export.py +30 -0
- spb_onprem/exports/params/exports.py +74 -0
- spb_onprem/exports/params/update_export.py +98 -0
- spb_onprem/exports/queries.py +158 -0
- spb_onprem/exports/service.py +224 -0
- spb_onprem/searches.py +12 -0
- spb_onprem/slices/params/slices.py +1 -1
- {superb_ai_onprem-0.1.6.dist-info → superb_ai_onprem-0.3.0.dist-info}/METADATA +1 -1
- {superb_ai_onprem-0.1.6.dist-info → superb_ai_onprem-0.3.0.dist-info}/RECORD +48 -12
- {superb_ai_onprem-0.1.6.dist-info → superb_ai_onprem-0.3.0.dist-info}/top_level.txt +1 -0
- tests/__init__.py +1 -0
- tests/activities/__init__.py +1 -0
- tests/activities/real_test.py +66 -0
- tests/activities/test_params.py +67 -0
- tests/activities/test_service.py +139 -0
- tests/exports/__init__.py +1 -0
- tests/exports/real_test.py +130 -0
- tests/exports/test_entities.py +236 -0
- tests/exports/test_integration.py +191 -0
- tests/exports/test_params.py +332 -0
- tests/exports/test_service.py +406 -0
- {superb_ai_onprem-0.1.6.dist-info → superb_ai_onprem-0.3.0.dist-info}/WHEEL +0 -0
- {superb_ai_onprem-0.1.6.dist-info → superb_ai_onprem-0.3.0.dist-info}/licenses/LICENSE +0 -0
tests/exports/test_entities.py (new file)
@@ -0,0 +1,236 @@
+import pytest
+from spb_onprem.exports.entities import Export
+from spb_onprem.data.params import DataListFilter, DataFilterOptions
+
+
+class TestExport:
+    def test_export_creation_minimal(self):
+        # Given
+        export_data = {
+            "id": "test_export_id",
+            "datasetId": "test_dataset_id"
+        }
+
+        # When
+        export = Export.model_validate(export_data)
+
+        # Then
+        assert export.id == "test_export_id"
+        assert export.dataset_id == "test_dataset_id"
+        assert export.name is None
+        assert export.data_filter is None
+        assert export.location is None
+        assert export.data_count is None
+        assert export.annotation_count is None
+        assert export.frame_count is None
+        assert export.meta is None
+        assert export.created_at is None
+        assert export.created_by is None
+        assert export.updated_at is None
+        assert export.updated_by is None
+        assert export.completed_at is None
+
+    def test_export_creation_full(self):
+        # Given
+        export_data = {
+            "id": "test_export_id",
+            "datasetId": "test_dataset_id",
+            "name": "test_export_name",
+            "dataFilter": {"must": {"keyContains": "test"}},
+            "location": "s3://test-bucket/exports/",
+            "dataCount": 100,
+            "annotationCount": 50,
+            "frameCount": 25,
+            "meta": {"created_by": "test_user", "version": 1},
+            "createdAt": "2024-01-01T00:00:00Z",
+            "createdBy": "test_user",
+            "updatedAt": "2024-01-01T12:00:00Z",
+            "updatedBy": "test_user",
+            "completedAt": "2024-01-01T13:00:00Z"
+        }
+
+        # When
+        export = Export.model_validate(export_data)
+
+        # Then
+        assert export.id == "test_export_id"
+        assert export.dataset_id == "test_dataset_id"
+        assert export.name == "test_export_name"
+        assert isinstance(export.data_filter, DataListFilter)
+        assert export.data_filter.must_filter.key_contains == "test"
+        assert export.location == "s3://test-bucket/exports/"
+        assert export.data_count == 100
+        assert export.annotation_count == 50
+        assert export.frame_count == 25
+        assert export.meta == {"created_by": "test_user", "version": 1}
+        assert export.created_at == "2024-01-01T00:00:00Z"
+        assert export.created_by == "test_user"
+        assert export.updated_at == "2024-01-01T12:00:00Z"
+        assert export.updated_by == "test_user"
+        assert export.completed_at == "2024-01-01T13:00:00Z"
+
+    def test_export_field_aliases(self):
+        # Given
+        export_data = {
+            "id": "test_export_id",
+            "datasetId": "test_dataset_id",
+            "dataFilter": {"must": {"keyContains": "filter"}},
+            "dataCount": 100,
+            "annotationCount": 50,
+            "frameCount": 25,
+            "createdAt": "2024-01-01T00:00:00Z",
+            "createdBy": "test_user",
+            "updatedAt": "2024-01-01T12:00:00Z",
+            "updatedBy": "test_user",
+            "completedAt": "2024-01-01T13:00:00Z"
+        }
+
+        # When
+        export = Export.model_validate(export_data)
+
+        # Then - Test that aliases work correctly
+        assert export.dataset_id == "test_dataset_id"  # datasetId -> dataset_id
+        assert isinstance(export.data_filter, DataListFilter)
+        assert export.data_filter.must_filter.key_contains == "filter"
+        assert export.data_count == 100  # dataCount -> data_count
+        assert export.annotation_count == 50  # annotationCount -> annotation_count
+        assert export.frame_count == 25  # frameCount -> frame_count
+        assert export.created_at == "2024-01-01T00:00:00Z"  # createdAt -> created_at
+        assert export.created_by == "test_user"  # createdBy -> created_by
+        assert export.updated_at == "2024-01-01T12:00:00Z"  # updatedAt -> updated_at
+        assert export.updated_by == "test_user"  # updatedBy -> updated_by
+        assert export.completed_at == "2024-01-01T13:00:00Z"  # completedAt -> completed_at
+
+    def test_export_model_dump_with_aliases(self):
+        # Given - Create export with DataListFilter
+        data_filter = DataListFilter(
+            must_filter=DataFilterOptions(key_contains="filter")
+        )
+
+        export = Export(
+            id="test_export_id",
+            dataset_id="test_dataset_id",
+            name="test_export",
+            data_filter=data_filter,
+            location="s3://test-bucket/",
+            data_count=100,
+            annotation_count=50,
+            frame_count=25,
+            meta={"test": "meta"},
+            created_at="2024-01-01T00:00:00Z",
+            created_by="test_user",
+            updated_at="2024-01-01T12:00:00Z",
+            updated_by="test_user",
+            completed_at="2024-01-01T13:00:00Z"
+        )
+
+        # When
+        dumped = export.model_dump(by_alias=True, exclude_unset=True)
+
+        # Then - Test that field names are properly aliased in output
+        assert dumped["id"] == "test_export_id"
+        assert dumped["datasetId"] == "test_dataset_id"
+        assert dumped["name"] == "test_export"
+        assert "dataFilter" in dumped
+        assert dumped["dataFilter"]["must"]["keyContains"] == "filter"
+        assert dumped["location"] == "s3://test-bucket/"
+        assert dumped["dataCount"] == 100
+        assert dumped["annotationCount"] == 50
+        assert dumped["frameCount"] == 25
+        assert dumped["meta"] == {"test": "meta"}
+        assert dumped["createdAt"] == "2024-01-01T00:00:00Z"
+        assert dumped["createdBy"] == "test_user"
+        assert dumped["updatedAt"] == "2024-01-01T12:00:00Z"
+        assert dumped["updatedBy"] == "test_user"
+        assert dumped["completedAt"] == "2024-01-01T13:00:00Z"
+
+    def test_export_required_fields_validation(self):
+        # Given - Missing required fields
+        export_data = {}
+
+        # When/Then - Should raise validation error for missing required fields
+        with pytest.raises(Exception):  # ValidationError from pydantic
+            Export.model_validate(export_data)
+
+    def test_export_required_dataset_id_validation(self):
+        # Given - Missing dataset_id
+        export_data = {
+            "id": "test_export_id"
+        }
+
+        # When/Then - Should raise validation error for missing dataset_id
+        with pytest.raises(Exception):  # ValidationError from pydantic
+            Export.model_validate(export_data)
+
+    def test_export_optional_fields_none(self):
+        # Given
+        export_data = {
+            "id": "test_export_id",
+            "datasetId": "test_dataset_id",
+            "name": None,
+            "dataFilter": None,
+            "location": None,
+            "dataCount": None,
+            "annotationCount": None,
+            "frameCount": None,
+            "meta": None,
+            "createdAt": None,
+            "createdBy": None,
+            "updatedAt": None,
+            "updatedBy": None,
+            "completedAt": None
+        }
+
+        # When
+        export = Export.model_validate(export_data)
+
+        # Then - All optional fields should be None
+        assert export.name is None
+        assert export.data_filter is None
+        assert export.location is None
+        assert export.data_count is None
+        assert export.annotation_count is None
+        assert export.frame_count is None
+        assert export.meta is None
+        assert export.created_at is None
+        assert export.created_by is None
+        assert export.updated_at is None
+        assert export.updated_by is None
+        assert export.completed_at is None
+
+    def test_export_equality(self):
+        # Given
+        export1 = Export(
+            id="test_export_id",
+            dataset_id="test_dataset_id",
+            name="test_export"
+        )
+        export2 = Export(
+            id="test_export_id",
+            dataset_id="test_dataset_id",
+            name="test_export"
+        )
+        export3 = Export(
+            id="different_export_id",
+            dataset_id="test_dataset_id",
+            name="test_export"
+        )
+
+        # When/Then
+        assert export1 == export2  # Same data should be equal
+        assert export1 != export3  # Different data should not be equal
+
+    def test_export_string_representation(self):
+        # Given
+        export = Export(
+            id="test_export_id",
+            dataset_id="test_dataset_id",
+            name="test_export"
+        )
+
+        # When
+        str_repr = str(export)
+
+        # Then
+        assert "test_export_id" in str_repr
+        assert "test_dataset_id" in str_repr
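For orientation, the alias round-trips exercised above imply an Export entity roughly like the sketch below. This is inferred from the test expectations only, not copied from the package's spb_onprem/exports/entities/export.py (which in this release likely builds on spb_onprem's own base_model); the pydantic configuration, field types, and defaults shown here are assumptions.

# Hypothetical reconstruction of the Export entity, inferred from the tests above.
# The real spb_onprem model may differ in its base class and configuration.
from typing import Any, Dict, Optional

from pydantic import BaseModel, ConfigDict, Field

from spb_onprem.data.params import DataListFilter


class Export(BaseModel):
    # Accept both snake_case field names and camelCase aliases on input,
    # matching test_export_field_aliases and test_export_model_dump_with_aliases.
    model_config = ConfigDict(populate_by_name=True)

    id: str
    dataset_id: str = Field(alias="datasetId")
    name: Optional[str] = None
    data_filter: Optional[DataListFilter] = Field(default=None, alias="dataFilter")
    location: Optional[str] = None
    data_count: Optional[int] = Field(default=None, alias="dataCount")
    annotation_count: Optional[int] = Field(default=None, alias="annotationCount")
    frame_count: Optional[int] = Field(default=None, alias="frameCount")
    meta: Optional[Dict[str, Any]] = None
    created_at: Optional[str] = Field(default=None, alias="createdAt")
    created_by: Optional[str] = Field(default=None, alias="createdBy")
    updated_at: Optional[str] = Field(default=None, alias="updatedAt")
    updated_by: Optional[str] = Field(default=None, alias="updatedBy")
    completed_at: Optional[str] = Field(default=None, alias="completedAt")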
tests/exports/test_integration.py (new file)
@@ -0,0 +1,191 @@
+"""
+Integration test for the exports module.
+This test demonstrates how to use the exports module in a real scenario.
+Note: This test is commented out as it requires actual API credentials and dataset.
+"""
+
+from spb_onprem.exports import ExportService, ExportFilterOptions
+from spb_onprem.exports.params import ExportFilter
+
+
+def test_export_service_integration():
+    """
+    Integration test for ExportService.
+
+    This test demonstrates the complete workflow of:
+    1. Creating an export
+    2. Getting exports with filtering
+    3. Getting a specific export
+    4. Updating an export
+    5. Deleting an export
+
+    Note: This test is commented out as it requires actual API connection.
+    """
+    # Uncomment the following lines to run with real API
+
+    # # Initialize the service
+    # export_service = ExportService()
+    # dataset_id = "your_dataset_id_here"
+
+    # # 1. Create an export
+    # new_export = export_service.create_export(
+    #     dataset_id=dataset_id,
+    #     name="Test Export from SDK",
+    #     data_filter={"must": {"keyContains": "validation"}},
+    #     meta={"created_by": "integration_test", "purpose": "testing"}
+    # )
+    # print(f"Created export: {new_export.id}")
+
+    # # 2. Get exports with filtering
+    # export_filter = ExportFilter(
+    #     must_filter=ExportFilterOptions(name_contains="Test Export")
+    # )
+    # exports, next_cursor, total_count = export_service.get_exports(
+    #     dataset_id=dataset_id,
+    #     export_filter=export_filter,
+    #     length=10
+    # )
+    # print(f"Found {len(exports)} exports, total: {total_count}")
+
+    # # 3. Get a specific export
+    # if exports:
+    #     export_detail = export_service.get_export(
+    #         dataset_id=dataset_id,
+    #         export_id=exports[0].id
+    #     )
+    #     print(f"Export details: {export_detail.name}")
+
+    # # 4. Update the export
+    # updated_export = export_service.update_export(
+    #     dataset_id=dataset_id,
+    #     export_id=new_export.id,
+    #     name="Updated Test Export",
+    #     meta={"updated_by": "integration_test", "status": "updated"}
+    # )
+    # print(f"Updated export name: {updated_export.name}")
+
+    # # 5. Delete the export
+    # delete_result = export_service.delete_export(
+    #     dataset_id=dataset_id,
+    #     export_id=new_export.id
+    # )
+    # print(f"Export deleted: {delete_result}")
+
+    # For now, just pass the test
+    assert True
+
+
+def test_export_filtering_examples():
+    """
+    Examples of how to use export filtering.
+    """
+    # Example 1: Filter by name containing specific text
+    name_filter = ExportFilter(
+        must_filter=ExportFilterOptions(name_contains="validation")
+    )
+
+    # Example 2: Filter by exact name match
+    exact_name_filter = ExportFilter(
+        must_filter=ExportFilterOptions(name="My Export")
+    )
+
+    # Example 3: Filter by location containing specific text
+    location_filter = ExportFilter(
+        must_filter=ExportFilterOptions(location_contains="s3://my-bucket")
+    )
+
+    # Example 4: Complex filter with must and must not conditions
+    complex_filter = ExportFilter(
+        must_filter=ExportFilterOptions(
+            name_contains="production",
+            location_contains="s3://"
+        ),
+        not_filter=ExportFilterOptions(
+            name_contains="test"
+        )
+    )
+
+    # Example 5: Filter by multiple IDs
+    id_filter = ExportFilter(
+        must_filter=ExportFilterOptions(
+            id_in=["export_id_1", "export_id_2", "export_id_3"]
+        )
+    )
+
+    # All filters should be valid - test the objects themselves
+    assert isinstance(name_filter.must_filter, ExportFilterOptions)
+    assert isinstance(exact_name_filter.must_filter, ExportFilterOptions)
+    assert isinstance(location_filter.must_filter, ExportFilterOptions)
+    assert isinstance(complex_filter.must_filter, ExportFilterOptions)
+    assert isinstance(complex_filter.not_filter, ExportFilterOptions)
+    assert isinstance(id_filter.must_filter, ExportFilterOptions)
+
+
+def test_export_service_usage_patterns():
+    """
+    Demonstrates common usage patterns for the export service.
+    """
+    # Pattern 1: Pagination through all exports
+    def get_all_exports(export_service, dataset_id):
+        all_exports = []
+        cursor = None
+
+        while True:
+            exports, next_cursor, _ = export_service.get_exports(
+                dataset_id=dataset_id,
+                cursor=cursor,
+                length=50  # Fetch 50 at a time
+            )
+            all_exports.extend(exports)
+
+            if next_cursor is None:
+                break
+            cursor = next_cursor
+
+        return all_exports
+
+    # Pattern 2: Find exports by criteria
+    def find_exports_by_name(export_service, dataset_id, name_pattern):
+        filter_options = ExportFilter(
+            must_filter=ExportFilterOptions(name_contains=name_pattern)
+        )
+
+        exports, _, _ = export_service.get_exports(
+            dataset_id=dataset_id,
+            export_filter=filter_options,
+            length=100
+        )
+
+        return exports
+
+    # Pattern 3: Bulk operations
+    def cleanup_test_exports(export_service, dataset_id):
+        test_filter = ExportFilter(
+            must_filter=ExportFilterOptions(name_contains="test")
+        )
+
+        exports, _, _ = export_service.get_exports(
+            dataset_id=dataset_id,
+            export_filter=test_filter,
+            length=100
+        )
+
+        deleted_count = 0
+        for export in exports:
+            if export_service.delete_export(dataset_id, export.id):
+                deleted_count += 1
+
+        return deleted_count
+
+    # These are just example functions, so we'll just assert they exist
+    assert callable(get_all_exports)
+    assert callable(find_exports_by_name)
+    assert callable(cleanup_test_exports)
+
+
+if __name__ == "__main__":
+    # Run the integration test
+    test_export_service_integration()
+    test_export_filtering_examples()
+    test_export_service_usage_patterns()
+    print("All integration tests passed!")
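The cursor loop in Pattern 1 above can also be written as a generator, so callers stream exports page by page instead of accumulating them all in memory. A minimal sketch, assuming get_exports keeps the (exports, next_cursor, total_count) return shape and the dataset_id/cursor/length parameters used in these tests; iter_exports itself is hypothetical and not part of the package.

# Lazy variant of Pattern 1: yield exports page by page.
# Assumes ExportService.get_exports returns (exports, next_cursor, total_count)
# exactly as the integration test above uses it.
from typing import Iterator

from spb_onprem.exports import ExportService
from spb_onprem.exports.entities import Export


def iter_exports(export_service: ExportService, dataset_id: str, page_size: int = 50) -> Iterator[Export]:
    cursor = None
    while True:
        exports, next_cursor, _ = export_service.get_exports(
            dataset_id=dataset_id,
            cursor=cursor,
            length=page_size,
        )
        yield from exports

        if next_cursor is None:
            break
        cursor = next_cursor


# Usage (requires a configured on-prem deployment and a real dataset id):
# for export in iter_exports(ExportService(), "your_dataset_id_here"):
#     print(export.id, export.name)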