dtlpy 1.114.17__py3-none-any.whl → 1.116.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +491 -491
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +347 -311
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +292 -296
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +449 -442
- dtlpy/entities/dataset.py +1299 -1285
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +235 -223
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +145 -145
- dtlpy/entities/filters.py +798 -645
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +959 -953
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +684 -684
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +810 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +299 -299
- dtlpy/entities/pipeline.py +624 -624
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +505 -499
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +963 -958
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +495 -495
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +1257 -1086
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -332
- dtlpy/repositories/commands.py +152 -158
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +439 -435
- dtlpy/repositories/datasets.py +1504 -1291
- dtlpy/repositories/downloader.py +976 -903
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +482 -470
- dtlpy/repositories/executions.py +815 -817
- dtlpy/repositories/feature_sets.py +226 -226
- dtlpy/repositories/features.py +255 -238
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +912 -909
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +1000 -988
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +451 -451
- dtlpy/repositories/pipelines.py +640 -640
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +419 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1477 -1477
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +661 -651
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1785 -1782
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +285 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp +1 -1
- dtlpy-1.116.6.data/scripts/dlp.bat +2 -0
- {dtlpy-1.114.17.data → dtlpy-1.116.6.data}/scripts/dlp.py +128 -128
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/METADATA +186 -183
- dtlpy-1.116.6.dist-info/RECORD +239 -0
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/WHEEL +1 -1
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/licenses/LICENSE +200 -200
- tests/features/environment.py +551 -551
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- dtlpy-1.114.17.data/scripts/dlp.bat +0 -2
- dtlpy-1.114.17.dist-info/RECORD +0 -240
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.114.17.dist-info → dtlpy-1.116.6.dist-info}/top_level.txt +0 -0
dtlpy/entities/filters.py
CHANGED
@@ -1,645 +1,798 @@
(all 645 lines of the previous version were removed)
The new version of the file (798 lines):

import numpy as np
import urllib.parse
import logging
import json
import os
import io
import copy
from typing import Generator, Tuple, Optional
from collections import deque
from concurrent.futures import ThreadPoolExecutor, wait, FIRST_COMPLETED
from bson import ObjectId


from enum import Enum
from .. import exceptions, entities

logger = logging.getLogger(name="dtlpy")


class FiltersKnownFields(str, Enum):
    DIR = "dir"
    ANNOTATED = "annotated"
    FILENAME = "filename"
    CREATED_AT = "createdAt"
    UPDATED_AT = "updatedAt"
    LABEL = "label"
    NAME = "name"
    HIDDEN = "hidden"
    TYPE = "type"


class FiltersResource(str, Enum):
    ITEM = "items"
    ANNOTATION = "annotations"
    EXECUTION = "executions"
    PACKAGE = "packages"
    DPK = "dpks"
    APP = "apps"
    SERVICE = "services"
    TRIGGER = "triggers"
    MODEL = "models"
    WEBHOOK = "webhooks"
    RECIPE = "recipe"
    DATASET = "datasets"
    ONTOLOGY = "ontology"
    TASK = "tasks"
    PIPELINE = "pipeline"
    PIPELINE_EXECUTION = "pipelineState"
    COMPOSITION = "composition"
    FEATURE = "feature_vectors"
    FEATURE_SET = "feature_sets"
    ORGANIZATIONS = "organizations"
    DRIVERS = "drivers"
    SETTINGS = "setting"
    RESOURCE_EXECUTION = "resourceExecution"
    METRICS = ("metrics",)
    SERVICE_DRIVER = ("serviceDrivers",)
    COMPUTE = "compute"


class FiltersOperations(str, Enum):
    OR = "or"
    AND = "and"
    IN = "in"
    NOT_EQUAL = "ne"
    EQUAL = "eq"
    GREATER_THAN = "gt"
    LESS_THAN = "lt"
    EXISTS = "exists"
    MATCH = "match"
    NIN = "nin"
    GREATER_THAN_OR_EQUAL = "gte"
    LESS_THAN_OR_EQUAL = "lte"


class FiltersMethod(str, Enum):
    OR = "or"
    AND = "and"


class FiltersOrderByDirection(str, Enum):
    DESCENDING = "descending"
    ASCENDING = "ascending"


class Filters:
    """
    Filters entity to filter items from pages in platform
    """

    def __init__(
        self,
        field=None,
        values=None,
        operator: FiltersOperations = None,
        method: FiltersMethod = None,
        custom_filter=None,
        resource: FiltersResource = FiltersResource.ITEM,
        use_defaults=True,
        context=None,
        page_size=None,
    ):
        if page_size is None:
            if resource in [FiltersResource.EXECUTION, FiltersResource.PIPELINE_EXECUTION, FiltersResource.DPK]:
                page_size = 100
            else:
                page_size = 1000

        self.or_filter_list = list()
        self.and_filter_list = list()
        self._unique_fields = list()
        self.custom_filter = custom_filter
        self.known_operators = ["or", "and", "in", "ne", "eq", "gt", "lt", "exists"]
        self._resource = resource
        self.page = 0
        self.page_size = page_size
        self.method = FiltersMethod.AND
        self.sort = dict()
        self.join = None
        self.recursive = True

        # system only - task and assignment attributes
        self._user_query = "true"
        self._ref_task = False
        self._ref_assignment = False
        self._ref_op = None
        self._ref_assignment_id = None
        self._ref_task_id = None
        self._system_space = None

        self._use_defaults = use_defaults
        self.__add_defaults()
        self.context = context

        if field is not None:
            self.add(field=field, values=values, operator=operator, method=method)

    def __validate_page_size(self):
        max_page_size = self.__max_page_size
        if self.page_size > max_page_size:
            logger.warning(
                "Cannot list {} with page size greater than {}. Changing page_size to {}.".format(
                    self.resource, max_page_size, max_page_size
                )
            )
            self.page_size = max_page_size

    @property
    def __max_page_size(self):
        page_size = 1000
        if self.resource in [FiltersResource.EXECUTION, FiltersResource.PIPELINE_EXECUTION]:
            page_size = 100
        return page_size

    @property
    def resource(self):
        return f"{self._resource.value}" if isinstance(self._resource, FiltersResource) else f"{self._resource}"

    @resource.setter
    def resource(self, resource):
        self._resource = resource
        self.reset()
        self.__add_defaults()

    @property
    def system_space(self):
        return self._system_space

    @system_space.setter
    def system_space(self, val: bool):
        self._system_space = val

    def reset(self):
        self.or_filter_list = list()
        self.and_filter_list = list()
        self._unique_fields = list()
        self.custom_filter = None
        self.page = 0
        self.page_size = 1000
        self.method = FiltersMethod.AND
        self.sort = dict()
        self.join = None
        self.recursive = True
        self._nullify_refs()

    def _nullify_refs(self):
        self._ref_task = False
        self._ref_assignment = False
        self._ref_op = None
        self._ref_assignment_id = None
        self._ref_task_id = None

    def add(self, field, values, operator: FiltersOperations = None, method: FiltersMethod = None):
        """
        Add filter

        :param str field: Metadata field / attribute
        :param values: field values
        :param dl.FiltersOperations operator: optional - in, gt, lt, eq, ne
        :param dl.FiltersMethod method: Optional - or/and

        **Example**:

        .. code-block:: python

            filter.add(field='metadata.user', values=['1','2'], operator=dl.FiltersOperations.IN)
        """
        if method is None:
            method = self.method
        if "metadata.system.refs.metadata" in field and self.resource == FiltersResource.ITEM:
            logger.warning(
                "Filtering by metadata.system.refs.metadata may cause incorrect results. please use match operator"
            )

        # create SingleFilter object and add to self.filter_list
        if method == FiltersMethod.OR:
            self.or_filter_list.append(SingleFilter(field=field, values=values, operator=operator))
        elif method == FiltersMethod.AND:
            self.__override(field=field, values=values, operator=operator)
        else:
            raise exceptions.PlatformException(
                error="400", message="Unknown method {}, please select from: or/and".format(method)
            )

    def __override(self, field, values, operator=None):
        if field in self._unique_fields:
            indices_to_remove = []
            for i_single_filter, single_filter in enumerate(self.and_filter_list):
                if single_filter.field == field:
                    indices_to_remove.append(i_single_filter)

            # Remove indices in descending order to avoid IndexError
            # When removing items, indices shift down, so we must remove from highest to lowest
            for index in sorted(indices_to_remove, reverse=True):
                self.and_filter_list.pop(index)
        self.and_filter_list.append(SingleFilter(field=field, values=values, operator=operator))

    def generate_url_query_params(self, url):
        """
        generate url query params

        :param str url:
        """
        url = "{}?".format(url)
        for f in self.and_filter_list:
            if isinstance(f.values, list):
                url = "{}{}={}&".format(url, f.field, ",".join(f.values))
            else:
                url = "{}{}={}&".format(url, f.field, f.values)
        return "{}&pageOffset={}&pageSize={}".format(url, self.page, self.page_size)

    def has_field(self, field):
        """
        is filter has field

        :param str field: field to check
        :return: Ture is have it
        :rtype: bool
        """
        for single_filter in self.or_filter_list:
            if single_filter.field == field:
                return True

        for single_filter in self.and_filter_list:
            if single_filter.field == field:
                return True

        return False

    def pop(self, field):
        """
        Pop filed

        :param str field: field to pop
        """
        for single_filter in self.or_filter_list:
            if single_filter.field == field:
                self.or_filter_list.remove(single_filter)

        for single_filter in self.and_filter_list:
            if single_filter.field == field:
                self.and_filter_list.remove(single_filter)

    def pop_join(self, field):
        """
        Pop join

        :param str field: field to pop
        """
        if self.join is not None:
            for single_filter in self.join["filter"]["$and"]:
                if field in single_filter:
                    self.join["filter"]["$and"].remove(single_filter)

    def add_join(self, field, values, operator: FiltersOperations = None, method: FiltersMethod = FiltersMethod.AND):
        """
        join a query to the filter

        :param str field: Metadata field / attribute
        :param str or list values: field values
        :param dl.FiltersOperations operator: optional - in, gt, lt, eq, ne
        :param method: optional - str - FiltersMethod.AND, FiltersMethod.OR

        **Example**:

        .. code-block:: python

            filter.add_join(field='metadata.user', values=['1','2'], operator=dl.FiltersOperations.IN)
        """
        if self.resource not in [FiltersResource.ITEM, FiltersResource.ANNOTATION]:
            raise exceptions.PlatformException(error="400", message="Cannot join to {} filters".format(self.resource))

        if self.join is None:
            self.join = dict()
        if "on" not in self.join:
            if self.resource == FiltersResource.ITEM:
                self.join["on"] = {"resource": FiltersResource.ANNOTATION.value, "local": "itemId", "forigen": "id"}
            else:
                self.join["on"] = {"resource": FiltersResource.ITEM.value, "local": "id", "forigen": "itemId"}
        if "filter" not in self.join:
            self.join["filter"] = dict()
        join_method = "$" + method
        if join_method not in self.join["filter"]:
            self.join["filter"][join_method] = list()
        self.join["filter"][join_method].append(SingleFilter(field=field, values=values, operator=operator).prepare())

    def __add_defaults(self):
        if self._use_defaults:
            # add items defaults
            if self.resource == FiltersResource.ITEM:
                self._unique_fields = ["type", "hidden"]
                self.add(field="hidden", values=False, method=FiltersMethod.AND)
                self.add(field="type", values="file", method=FiltersMethod.AND)
            # add service defaults
            elif self.resource == FiltersResource.SERVICE:
                self._unique_fields = ["global"]
                self.add(field="global", values=True, operator=FiltersOperations.NOT_EQUAL, method=FiltersMethod.AND)
            elif self.resource == FiltersResource.PACKAGE:
                self._unique_fields = ["global"]
                self.add(field="global", values=True, operator=FiltersOperations.NOT_EQUAL, method=FiltersMethod.AND)
            # add annotations defaults
            elif self.resource == FiltersResource.ANNOTATION:
                self._unique_fields = ["type"]
                values = [annotation_type.value for annotation_type in entities.AnnotationType]
                values.remove(entities.AnnotationType.NOTE.value)
                self.add(field="type", values=values, operator=FiltersOperations.IN, method=FiltersMethod.AND)

    def __generate_query(self):
        filters_dict = dict()

        if len(self.or_filter_list) > 0:
            or_filters = list()
            for single_filter in self.or_filter_list:
                or_filters.append(
                    single_filter.prepare(recursive=self.recursive and self.resource == FiltersResource.ITEM)
                )
            filters_dict["$or"] = or_filters

        if len(self.and_filter_list) > 0:
            and_filters = list()
            for single_filter in self.and_filter_list:
                and_filters.append(
                    single_filter.prepare(recursive=self.recursive and self.resource == FiltersResource.ITEM)
                )
            filters_dict["$and"] = and_filters

        return filters_dict

    def __generate_custom_query(self):
        if "filter" not in self.custom_filter:
            query_dict = {"filter": self.custom_filter}
        else:
            query_dict = self.custom_filter
        if "resource" not in query_dict:
            query_dict["resource"] = self.resource
        if "page" not in query_dict:
            query_dict["page"] = self.page
        if "pageSize" not in query_dict:
            query_dict["pageSize"] = self.page_size
        if self.join is not None and 'join' not in query_dict:
            query_dict["join"] = self.join
        if "join" in query_dict and "on" not in query_dict["join"]:
            if self.resource == FiltersResource.ITEM:
                query_dict["join"]["on"] = {
                    "resource": FiltersResource.ANNOTATION.value,
                    "local": "itemId",
                    "forigen": "id",
                }
            else:
                query_dict["join"]["on"] = {"resource": FiltersResource.ITEM.value, "local": "id", "forigen": "itemId"}

        return query_dict

    def __generate_ref_query(self):
        refs = list()
        if self._ref_task:
            task_refs = list()
            if not isinstance(self._ref_task_id, list):
                self._ref_task_id = [self._ref_task_id]

            for ref_id in self._ref_task_id:
                task_refs.append({"type": "task", "id": ref_id})

            refs += task_refs

        if self._ref_assignment:
            assignment_refs = list()
            if not isinstance(self._ref_assignment_id, list):
                self._ref_assignment_id = [self._ref_assignment_id]

            for ref_id in self._ref_assignment_id:
                assignment_refs.append({"type": "assignment", "id": ref_id})

            refs += assignment_refs

        return refs

    def prepare(self, operation=None, update=None, query_only=False, system_update=None, system_metadata=False):
        """
        To dictionary for platform call

        :param str operation: operation
        :param update: update
        :param bool query_only: query only
        :param system_update: system update
        :param system_metadata: True, if you want to change metadata system
        :return: dict of the filter
        :rtype: dict
        """
        ########
        # json #
        ########
        _json = dict()

        if self.custom_filter is not None:
            _json = self.__generate_custom_query()
            return _json

        _json["filter"] = self.__generate_query()

        ##################
        # filter options #
        ##################
        if not query_only:
            if len(self.sort) > 0:
                _json["sort"] = self.sort

            self.__validate_page_size()

            _json["page"] = self.page
            _json["pageSize"] = self.page_size
            _json["resource"] = self.resource

        ########
        # join #
        ########
        if self.join is not None:
            _json["join"] = self.join

        #####################
        # operation or refs #
        #####################
        if self._ref_assignment or self._ref_task:
            _json["references"] = {"operation": self._ref_op, "refs": self.__generate_ref_query()}
        elif operation is not None:
            if operation == "update":
                if update:
                    _json[operation] = {"metadata": {"user": update}}
                else:
                    _json[operation] = dict()
                if system_metadata and system_update:
                    _json["systemSpace"] = True
                    _json[operation]["metadata"] = _json[operation].get("metadata", dict())
                    _json[operation]["metadata"]["system"] = system_update
            elif operation == "delete":
                _json[operation] = True
                _json.pop("sort", None)
                if self.resource == FiltersResource.ITEM:
                    _json.pop("page", None)
                    _json.pop("pageSize", None)
            else:
                raise exceptions.PlatformException(error="400", message="Unknown operation: {}".format(operation))

        if self.context is not None:
            _json["context"] = self.context
        if self._system_space is not None:
            _json["systemSpace"] = self._system_space
        return _json

    def print(self, indent=2):
        print(json.dumps(self.prepare(), indent=indent))

    def sort_by(self, field, value: FiltersOrderByDirection = FiltersOrderByDirection.ASCENDING):
        """
        sort the filter

        :param str field: field to sort by it
        :param dl.FiltersOrderByDirection value: FiltersOrderByDirection.ASCENDING, FiltersOrderByDirection.DESCENDING

        **Example**:

        .. code-block:: python

            filter.sort_by(field='metadata.user', values=dl.FiltersOrderByDirection.ASCENDING)
        """
        if value not in [FiltersOrderByDirection.ASCENDING, FiltersOrderByDirection.DESCENDING]:
            raise exceptions.PlatformException(error="400", message="Sort can be by ascending or descending order only")
        self.sort[field] = value.value if isinstance(value, FiltersOrderByDirection) else value

    def platform_url(self, resource) -> str:
        """
        Build a url with filters param to open in web browser

        :param str resource: dl entity to apply filter on. currently only supports dl.Dataset
        :return: url string
        :rtype: str
        """
        _json = self.prepare()
        # add the view option
        _json["view"] = "icons"
        # convert from enum to string
        _json["resource"] = f'{_json["resource"]}'
        # convert the dictionary to a json string
        _json["dqlFilter"] = json.dumps(
            {"filter": _json.pop("filter"), "join": _json.pop("join", None), "sort": _json.get("sort", None)}
        )
        # set the page size as the UI default
        _json["pageSize"] = 100
        _json["page"] = _json["page"]
        # build the url for the dataset data browser
        if isinstance(resource, entities.Dataset):
            url = resource.platform_url + f"?{urllib.parse.urlencode(_json)}"
        else:
            raise NotImplementedError("Not implemented for resource type: {}".format(type(resource)))
        return url

    def open_in_web(self, resource):
        """
        Open the filter in the platform data browser (in a new web browser)

        :param str resource: dl entity to apply filter on. currently only supports dl.Dataset
        """
        if isinstance(resource, entities.Dataset):
            resource._client_api._open_in_web(url=self.platform_url(resource=resource))
        else:
            raise NotImplementedError("Not implemented for resource type: {}".format(type(resource)))

    def save(self, project: entities.Project, filter_name: str):
        """
        Save the current DQL filter to the project

        :param project: dl.Project
        :param filter_name: the saved filter's name
        :return: True if success
        """
        _json_filter = self.prepare()
        shebang_dict = {
            "type": "dql",
            "shebang": "dataloop",
            "metadata": {
                "version": "1.0.0",
                "system": {"mimetype": "dql"},
                "dltype": "filter",
                "filterFieldsState": [],
                "resource": "items",
                "filter": _json_filter.pop("filter"),
                "join": _json_filter.pop("join"),
            },
        }
        b_dataset = project.datasets._get_binaries_dataset()
        byte_io = io.BytesIO()
        byte_io.name = filter_name
        byte_io.write(json.dumps(shebang_dict).encode())
        byte_io.seek(0)
        b_dataset.items.upload(local_path=byte_io, remote_path="/.dataloop/dqlfilters/items", remote_name=filter_name)
        return True

    @classmethod
    def load(cls, project: entities.Project, filter_name: str) -> "Filters":
        """
        Load a saved filter from the project by name

        :param project: dl.Project entity
        :param filter_name: filter name
        :return: dl.Filters
        """
        b_dataset = project.datasets._get_binaries_dataset()
        f = entities.Filters(
            custom_filter={
                "filter": {"$and": [{"filename": f"/.dataloop/dqlfilters/items/{filter_name}"}]},
                "page": 0,
                "pageSize": 1000,
                "resource": "items",
            }
        )
        pages = b_dataset.items.list(filters=f)
        if pages.items_count == 0:
            raise exceptions.NotFound(
                f"Saved filter not found: {filter_name}. Run `Filters.list()` to list existing filters"
            )
        with open(pages.items[0].download()) as f:
            data = json.load(f)
        custom_filter = data["metadata"]["filter"]
        custom_filter["join"] = data["metadata"]["join"]
        return cls(custom_filter=custom_filter)

    @staticmethod
    def list(project: entities.Project) -> list:
        """
        List all saved filters for a project
        :param project: dl.Project entity
        :return: a list of all the saved filters' names
        """
        b_dataset = project.datasets._get_binaries_dataset()
        f = entities.Filters(use_defaults=False, field="dir", values="/.dataloop/dqlfilters/items")
        pages = b_dataset.items.list(filters=f)
        all_filter_items = list(pages.all())
        names = [i.name for i in all_filter_items]
        return names

    @staticmethod
    def _get_split_filters(dataset, filters, max_items, max_workers=4, max_depth=None) -> Generator[dict, None, None]:
        """
        Generator that yields filter chunks for large datasets using a bounded
        thread pool. Splits ranges by id until each subset holds <= max_items.

        :param dataset: Dataset object to get filters for
        :param filters: Base filters to apply
        :param max_items: Maximum number of items per filter chunk
        :param max_workers: Maximum number of threads for parallel processing
        :param max_depth: Maximum depth of the filter tree. Default calculated by the formula: np.ceil(np.log2(count/max_items) + 3).
        :yield: Filter payloads covering subsets of items
        """
        if max_items <= 0:
            raise ValueError("_get_split_filters : max_items must be greater than 0")

        if filters is None:
            filters = entities.Filters()

        from_id, count = Filters._get_first_last_item(
            items_repo=dataset.items, filters=filters, order_by_direction=FiltersOrderByDirection.ASCENDING
        )
        to_id, count = Filters._get_first_last_item(
            items_repo=dataset.items, filters=filters, order_by_direction=FiltersOrderByDirection.DESCENDING
        )

        if from_id is None or to_id is None or count == 0:
            return

        max_depth = max_depth if max_depth is not None else np.ceil(np.log2(count / max_items) + 3)

        def make_filter_dict(range_from_id, range_to_id, strict_from: bool = False):
            fdict = copy.deepcopy(filters.prepare())
            lower_op = "$gt" if strict_from else "$gte"
            fdict["filter"].setdefault("$and", []).extend(
                [{"id": {lower_op: range_from_id}}, {"id": {"$lte": range_to_id}}]
            )
            return fdict

        def task(range_from_id, range_to_id, depth, strict_from: bool):
            fdict = make_filter_dict(range_from_id, range_to_id, strict_from)
            range_filters = entities.Filters(custom_filter=fdict, page_size=1)
            actual_from, count = Filters._get_first_last_item(
                dataset.items, range_filters, FiltersOrderByDirection.ASCENDING
            )
            if count == 0:
                return ("none", None, None)
            if count <= max_items or depth >= max_depth:
                return ("yield", fdict, None)
            actual_to, count = Filters._get_first_last_item(
                dataset.items, range_filters, FiltersOrderByDirection.DESCENDING
            )
            if not actual_from or not actual_to or actual_from == actual_to:
                return ("yield", fdict, None)
            mid = Filters._get_middle_id(actual_from, actual_to)
            if not mid or mid == actual_from or mid == actual_to:
                return ("yield", fdict, None)
            # Left child: [actual_from, mid] inclusive; Right child: (mid, actual_to] exclusive lower bound
            return (
                "split",
                None,
                (
                    (actual_from, mid, depth + 1, False),  # left child includes lower bound
                    (mid, actual_to, depth + 1, True),  # right child excludes midpoint
                ),
            )

        pending = deque([(from_id, to_id, 0, False)])
        futures = set()

        with ThreadPoolExecutor(max_workers=max_workers) as pool:
            while futures or pending:
                # Submit all pending tasks
                while pending:
                    fr, to, d, strict = pending.popleft()
                    futures.add(pool.submit(task, fr, to, d, strict))

                if not futures:
                    break

                done, futures = wait(futures, return_when=FIRST_COMPLETED)
                for fut in done:
                    try:
                        kind, fdict, ranges = fut.result()
                    except Exception as e:
                        logger.warning(f"split filters task failed: {e}")
                        continue
                    if kind == "yield" and fdict is not None:
                        yield fdict
                    elif kind == "split" and ranges is not None:
                        left, right = ranges
                        pending.append(left)
                        pending.append(right)

    @staticmethod
    def _get_first_last_item(
        items_repo, filters, order_by_direction=FiltersOrderByDirection.ASCENDING
    ) -> Tuple[Optional[str], int]:
        filters_dict = copy.deepcopy(filters.prepare())
        filters_dict["sort"] = {"id": order_by_direction.value}
        filters_dict["page"] = 0
        filters_dict["pageSize"] = 1
        cloned_filters = entities.Filters(custom_filter=filters_dict)

        try:
            pages = items_repo.list(filters=cloned_filters)
            return (pages.items[0].id if pages.items else None, pages.items_count)
        except Exception:
            return None, 0

    @staticmethod
    def _get_middle_id(from_id, to_id):
        """Calculate middle ObjectId between two ObjectIds with sub-second precision.

        Computes the midpoint in the full 12-byte ObjectId numeric space to avoid
        second-level rounding inherent to datetime-based construction.
        """
        try:
            # Convert ObjectId strings to integers using base 16 (hexadecimal)
            start_int = int(str(ObjectId(from_id)), base=16)
            end_int = int(str(ObjectId(to_id)), base=16)
            if start_int >= end_int:
                return from_id
            mid_int = (start_int + end_int) // 2
            if mid_int <= start_int:
                mid_int = start_int + 1
            if mid_int > end_int:
                mid_int = end_int
            # Convert back to 12-byte ObjectId format
            mid_bytes = mid_int.to_bytes(length=12, byteorder="big")
            return str(ObjectId(mid_bytes))
        except Exception:
            return from_id  # Fallback to from_id if calculation fails


class SingleFilter:
    def __init__(self, field, values, operator: FiltersOperations = None):
        self.field = field
        self.values = values
        self.operator = operator

    @staticmethod
    def __add_recursive(value):
        if not value.endswith("*") and not os.path.splitext(value)[-1].startswith("."):
            if value.endswith("/"):
                value = value + "**"
            else:
                value = value + "/**"
        return value

    def prepare(self, recursive=False):
        """
        To dictionary for platform call

        :param recursive:recursive
        """
        _json = dict()
        values = self.values

        if recursive and self.field == "filename":
            if isinstance(values, str):
                values = self.__add_recursive(value=values)
            elif isinstance(values, list):
                for i_value, value in enumerate(values):
                    values[i_value] = self.__add_recursive(value=value)

        if self.operator is None:
            _json[self.field] = values
        else:
            value = dict()
            op = self.operator.value if isinstance(self.operator, FiltersOperations) else self.operator
            value["${}".format(op)] = values
            _json[self.field] = value

        return _json

    def print(self, indent=2):
        print(json.dumps(self.prepare(), indent=indent))
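
For orientation, here is a minimal usage sketch of the Filters entity shown above, including the persisted-filter helpers (save / load / list). It is a sketch, not part of the release: it assumes a logged-in dtlpy SDK and the usual entry points (dl.projects.get, project.datasets.get, dataset.items.list), and the project name, dataset name, field names and values are illustrative placeholders.

import dtlpy as dl

# Build an item filter; with use_defaults=True the constructor already adds hidden=False and type="file".
filters = dl.Filters(resource=dl.FiltersResource.ITEM)
filters.add(field='dir', values='/images', method=dl.FiltersMethod.AND)
filters.add(field='metadata.user', values=['1', '2'], operator=dl.FiltersOperations.IN)
# Join on annotations so only items carrying a 'box' annotation are returned (illustrative values).
filters.add_join(field='type', values='box')
filters.sort_by(field='filename', value=dl.FiltersOrderByDirection.ASCENDING)
filters.print()  # dumps the DQL payload produced by prepare()

project = dl.projects.get(project_name='<project-name>')       # placeholder name
dataset = project.datasets.get(dataset_name='<dataset-name>')  # placeholder name
pages = dataset.items.list(filters=filters)
for item in pages.all():
    print(item.name)

# Persist the DQL under /.dataloop/dqlfilters/items and reload it by name.
filters.save(project=project, filter_name='box-items-in-images')
print(dl.Filters.list(project=project))
restored = dl.Filters.load(project=project, filter_name='box-items-in-images')

Internally, the _get_split_filters generator defined in this file bisects the item id range with _get_middle_id (the midpoint is taken over the full 12-byte ObjectId space) and probes each candidate range through a bounded ThreadPoolExecutor, yielding DQL payloads that each cover at most max_items items.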