dtlpy 1.102.14__py3-none-any.whl → 1.104.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__init__.py +1 -0
- dtlpy/__version__.py +1 -1
- dtlpy/entities/__init__.py +1 -0
- dtlpy/entities/annotation.py +17 -3
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +13 -4
- dtlpy/entities/collection.py +39 -0
- dtlpy/entities/command.py +10 -5
- dtlpy/entities/compute.py +59 -6
- dtlpy/entities/dataset.py +9 -5
- dtlpy/entities/dpk.py +9 -9
- dtlpy/entities/execution.py +6 -0
- dtlpy/entities/filters.py +2 -2
- dtlpy/entities/integration.py +0 -1
- dtlpy/entities/item.py +56 -2
- dtlpy/entities/organization.py +5 -5
- dtlpy/ml/base_model_adapter.py +8 -8
- dtlpy/repositories/__init__.py +1 -0
- dtlpy/repositories/collections.py +296 -0
- dtlpy/repositories/downloader.py +2 -0
- dtlpy/repositories/features.py +9 -5
- dtlpy/repositories/integrations.py +52 -1
- dtlpy/repositories/items.py +10 -3
- dtlpy/repositories/pipelines.py +3 -9
- dtlpy/repositories/uploader.py +16 -4
- dtlpy/services/api_client.py +2 -2
- {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/METADATA +2 -2
- {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/RECORD +34 -33
- dtlpy/assets/__pycache__/__init__.cpython-310.pyc +0 -0
- {dtlpy-1.102.14.data → dtlpy-1.104.14.data}/scripts/dlp +0 -0
- {dtlpy-1.102.14.data → dtlpy-1.104.14.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.102.14.data → dtlpy-1.104.14.data}/scripts/dlp.py +0 -0
- {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/LICENSE +0 -0
- {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/WHEEL +0 -0
- {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/top_level.txt +0 -0
dtlpy/repositories/collections.py
ADDED
@@ -0,0 +1,296 @@
+from venv import logger
+from dtlpy import entities, exceptions, repositories
+from dtlpy.entities.dataset import Dataset
+from dtlpy.entities.filters import FiltersMethod
+from dtlpy.services.api_client import ApiClient
+from typing import List
+
+class Collections:
+    def __init__(self,
+                 client_api: ApiClient,
+                 item: entities.Item = None,
+                 dataset: entities.Dataset = None
+                 ):
+        self._client_api = client_api
+        self._dataset = dataset
+        self._item = item
+
+    def create(self, name: str) -> entities.Collection:
+        """
+        Creates a new collection in the dataset.
+
+        :param name: The name of the new collection.
+        :return: The created collection details.
+        """
+        dataset_id = self._dataset.id
+        self.validate_max_collections()
+        self.validate_collection_name(name)
+        payload = {"name": name}
+        success, response = self._client_api.gen_request(
+            req_type="post", path=f"/datasets/{dataset_id}/items/collections", json_req=payload
+        )
+        if success:
+            collection_json = self._single_collection(data=response.json(), name=name)
+            return entities.Collection.from_json(client_api=self._client_api, _json=collection_json)
+        else:
+            raise exceptions.PlatformException(response)
+
+    def update(self, collection_name: str, new_name: str) -> entities.Collection:
+        """
+        Updates the name of an existing collection.
+
+        :param collection_id: The ID of the collection to update.
+        :param new_name: The new name for the collection.
+        :return: The updated collection details.
+        """
+        dataset_id = self._dataset.id
+        self.validate_collection_name(new_name)
+        payload = {"name": new_name}
+        success, response = self._client_api.gen_request(
+            req_type="patch", path=f"/datasets/{dataset_id}/items/collections/{collection_name}", json_req=payload
+        )
+        if success:
+            collection_json = self._single_collection(data=response.json(), name=new_name)
+            return entities.Collection.from_json(client_api=self._client_api, _json=collection_json)
+        else:
+            raise exceptions.PlatformException(response)
+
+    def delete(self, collection_name: str) -> bool:
+        """
+        Deletes a collection from the dataset.
+
+        :param collection_name: The name of the collection to delete.
+        """
+        dataset_id = self._dataset.id
+        success, response = self._client_api.gen_request(
+            req_type="delete", path=f"/datasets/{dataset_id}/items/collections/{collection_name}"
+        )
+        if success:
+            # Wait for the split operation to complete
+            command = entities.Command.from_json(_json=response.json(),
+                                                 client_api=self._client_api)
+            command.wait()
+            return True
+        else:
+            raise exceptions.PlatformException(response)
+
+    def clone(self, collection_name: str) -> dict:
+        """
+        Clones an existing collection, creating a new one with a unique name.
+
+        :param collection_name: The name of the collection to clone.
+        :return: The cloned collection details as a dictionary.
+        """
+        self.validate_max_collections()
+        collections = self.list_all_collections()
+        original_collection = next((c for c in collections if c["name"] == collection_name), None)
+
+        if not original_collection:
+            raise ValueError(f"Collection with name '{collection_name}' not found.")
+
+        source_name = original_collection["name"]
+        num = 0
+        clone_name = ""
+        while True:
+            num += 1
+            clone_name = f"{source_name}-clone-{num}"
+            if not any(c["name"] == clone_name for c in collections):  # Use c["name"] for comparison
+                break
+
+        # Create the cloned collection
+        cloned_collection = self.create(name=clone_name)
+        self.assign(dataset_id=self._dataset.id, collections=[cloned_collection.name], collection_key=original_collection['key'])
+        return cloned_collection
+
+
+    def list_all_collections(self) -> entities.Collection:
+        """
+        Retrieves all collections in the dataset.
+
+        :return: A list of collections in the dataset.
+        """
+        dataset_id = self._dataset.id
+        success, response = self._client_api.gen_request(
+            req_type="GET", path=f"/datasets/{dataset_id}/items/collections"
+        )
+        if success:
+            data = response.json()
+            return self._list_collections(data)
+        else:
+            raise exceptions.PlatformException(response)
+
+    def validate_collection_name(self, name: str):
+        """
+        Validate that the collection name is unique.
+
+        :param name: The name of the collection to validate.
+        :raises ValueError: If a collection with the same name already exists.
+        """
+        collections = self.list_all_collections()
+        if any(c["name"] == name for c in collections):
+            raise ValueError(f"A collection with the name '{name}' already exists.")
+
+    def validate_max_collections(self) -> None:
+        """
+        Validates that the dataset has not exceeded the maximum allowed collections.
+
+        :raises ValueError: If the dataset has 10 or more collections.
+        """
+        collections = self.list_all_collections()
+        if len(collections) >= 10:
+            raise ValueError("The dataset already has the maximum number of collections (10).")
+
+    def list_unassigned_items(self) -> list:
+        """
+        List unassigned items in a dataset (items where all collection fields are false).
+
+        :return: List of unassigned item IDs
+        :rtype: list
+        """
+        filters = entities.Filters(method=FiltersMethod.AND)  # Use AND method for all conditions
+        collection_fields = [
+            "collections0",
+            "collections1",
+            "collections2",
+            "collections3",
+            "collections4",
+            "collections5",
+            "collections6",
+            "collections7",
+            "collections8",
+            "collections9",
+        ]
+
+        # Add each field to the filter with a value of False
+        for field in collection_fields:
+            filters.add(field=field, values=False, method=FiltersMethod.AND)
+
+        missing_ids = []
+        pages = self._dataset.items.list(filters=filters)
+        for page in pages:
+            for item in page:
+                # Items that pass filters mean all collections are false
+                missing_ids.append(item.id)
+
+        return missing_ids
+
+    def assign(
+            self,
+            dataset_id: str,
+            collections: List[str],
+            item_id: str = None,
+            collection_key: str = None
+    ) -> bool:
+        """
+        Assign an item to a collection. Creates the collection if it does not exist.
+
+        :param dataset_id: ID of the dataset.
+        :param collections: List of the collections to assign the item to.
+        :param item_id: (Optional) ID of the item to assign. If not provided, all items in the dataset will be updated.
+        :param collection_key: (Optional) Key for the bulk assignment. If not provided, no specific metadata will be updated.
+        :return: True if the assignment was successful, otherwise raises an exception.
+        """
+        # Build the query structure
+        if collection_key:
+            query = {
+                "filter": {
+                    f"metadata.system.collections.{collection_key}": True
+                }
+            }
+        elif item_id:
+            query = {
+                "id": {"$eq": item_id}
+            }
+        else:
+            raise ValueError("Either collection_key or item_id must be provided.")
+
+        # Create the payload
+        payload = {
+            "query": query,
+            "collections": collections,
+        }
+
+        # Make the API request to assign the item
+        success, response = self._client_api.gen_request(
+            req_type="post",
+            path=f"/datasets/{dataset_id}/items/collections/bulk-add",
+            json_req=payload,
+        )
+
+        if success:
+            # Wait for the operation to complete
+            command = entities.Command.from_json(_json=response.json(), client_api=self._client_api)
+            command.wait()
+            return True
+        else:
+            raise exceptions.PlatformException(f"Failed to assign item to collections: {response}")
+
+
+    def unassign(self, dataset_id: str, item_id: str, collections: List[str]) -> bool:
+        """
+        Unassign an item from a collection.
+        :param item_id: ID of the item.
+        :param collections: List of collection names to unassign.
+        """
+        payload = {
+            "query": {"id": {"$eq": item_id}},
+            "collections": collections,
+        }
+        success, response = self._client_api.gen_request(
+            req_type="post",
+            path=f"/datasets/{dataset_id}/items/collections/bulk-remove",
+            json_req=payload,
+        )
+        if success:
+            # Wait for the split operation to complete
+            command = entities.Command.from_json(_json=response.json(),
+                                                 client_api=self._client_api)
+            command.wait()
+            return True
+        else:
+            raise exceptions.PlatformException(response)
+
+    def _single_collection(sef, data: dict, name: str):
+        """
+        Retrieves the key-value pair from the dictionary where the collection's name matches the given name.
+
+        :param data: A dictionary containing collection data in the format:
+                     { "metadata.system.collections.c0": {"name": "Justice League"}, ... }
+        :param name: The name of the collection to find.
+        :return: The key-value pair where the name matches, or None if not found.
+        """
+        for key, value in data.items():
+            if value.get("name") == name:
+                return {key: value}
+        return None
+
+    def _list_collections(self, data: dict):
+        """
+        Create a list of Collection entities from the dataset JSON.
+
+        :param data: The flat JSON containing collection data in the format:
+                     { "metadata.system.collections.c0": {"name": "Justice League"}, ... }
+        :return: A list of Collection entities.
+        """
+        collections = []
+        for full_key, value in data.items():
+            if "metadata.system.collections" in full_key:
+                # Strip the prefix
+                key = full_key.replace("metadata.system.collections.", "")
+                collection_name = value.get("name")
+                collections.append({"key": key, "name": collection_name})
+        return collections
+
+    def get_name_by_key(self, key: str) -> str:
+        """
+        Get the name of a collection by its key.
+
+        :param key: The key of the collection (e.g., 'c0', 'c1').
+        :return: The name of the collection if it exists; otherwise, an empty string.
+        """
+        # Assuming collections is a list of dictionaries
+        collections = self.list_all_collections()
+        for collection in collections:
+            if collection.get("key") == key:
+                return collection.get("name", "")
+        return ""
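Usage note: the new Collections repository wraps the /datasets/{id}/items/collections endpoints; bulk add/remove run server-side and are awaited via a Command. A minimal sketch follows, assuming the repository is exposed as dataset.collections (the dataset.py change in this release suggests that wiring, but it is not shown in this diff); all IDs and names are hypothetical.

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='my-dataset-id')  # hypothetical ID

    # create() enforces the 10-collection cap and name uniqueness before the POST
    collection = dataset.collections.create(name='night-shift')

    # bulk-add runs as a server-side command; assign() blocks on command.wait()
    dataset.collections.assign(dataset_id=dataset.id,
                               collections=['night-shift'],
                               item_id='my-item-id')  # hypothetical ID

    # items whose collections0..collections9 flags are all false
    unassigned_ids = dataset.collections.list_unassigned_items()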
dtlpy/repositories/downloader.py
CHANGED
@@ -674,6 +674,8 @@ class Downloader:
                                                  stream=True,
                                                  dataset_id=item.dataset_id)
         if not result:
+            if os.path.isfile(local_filepath + '.download'):
+                os.remove(local_filepath + '.download')
             raise PlatformException(response)
         else:
             _, ext = os.path.splitext(item.metadata['system']['shebang']['linkInfo']['ref'].split('?')[0])
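The downloader fix above removes the '.download' sidecar file that a partial fetch leaves next to the target path, so a failed fetch no longer strands a stale temp file. The general pattern, as a standalone sketch (plain requests, not the dtlpy API; URL and paths are placeholders):

    import os
    import requests

    def download_with_sidecar(url: str, local_filepath: str) -> None:
        # write to a '.download' sidecar first, promote to the real name on success
        temp_path = local_filepath + '.download'
        try:
            response = requests.get(url, stream=True, timeout=60)
            response.raise_for_status()
            with open(temp_path, 'wb') as f:
                for chunk in response.iter_content(chunk_size=8192):
                    f.write(chunk)
            os.replace(temp_path, local_filepath)
        except Exception:
            # mirror the fix above: never leave a stale partial file behind
            if os.path.isfile(temp_path):
                os.remove(temp_path)
            raise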
dtlpy/repositories/features.py
CHANGED
@@ -17,9 +17,11 @@ class Features:
                  project_id: str = None,
                  item: entities.Item = None,
                  annotation: entities.Annotation = None,
-                 feature_set: entities.FeatureSet = None
+                 feature_set: entities.FeatureSet = None,
+                 dataset: entities.Dataset = None):
         if project is not None and project_id is None:
             project_id = project.id
+        self._dataset = dataset
         self._project = project
         self._project_id = project_id
         self._item = item
@@ -50,7 +52,7 @@ class Features:
         if self._project is None:
             raise exceptions.PlatformException(
                 error='2001',
-                message='Cannot perform action WITHOUT Project entity in
+                message='Cannot perform action WITHOUT Project entity in Features repository.'
                         ' Please checkout or set a project')
         assert isinstance(self._project, entities.Project)
         return self._project
@@ -60,8 +62,8 @@ class Features:
     ###########
     def _list(self, filters: entities.Filters):
         """
-        Get dataset
-        user is expected to perform another request then for every folder item to actually get the
+        Get dataset feature vectors list. This is a browsing endpoint, for any given path feature count will be returned,
+        user is expected to perform another request then for every folder item to actually get the item list.

        :param dtlpy.entities.filters.Filters filters: Filters entity or a dictionary containing filters parameters
        :return: json response
@@ -104,6 +106,8 @@ class Features:
             filters.add(field='featureSetId', values=self._feature_set.id)
         if self._item is not None:
             filters.add(field='entityId', values=self._item.id)
+        if self._dataset is not None:
+            filters.add(field='datasetId', values=self._dataset.id)
         if self._project_id is None:
             self._project_id = self.project.id
         filters.context = {"projects": [self._project_id]}
@@ -169,7 +173,7 @@ class Features:
         if feature_set_id is None:
             if self._feature_set is None:
                 raise ValueError(
-                    'Missing feature_set_id. Must insert the variable or create from context
+                    'Missing feature_set_id. Must insert the variable or create from context, e.g. feature_set.features.create()')
             feature_set_id = self._feature_set.id

         payload = {'project': project_id,
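With the new dataset argument, a Features repository constructed with a dataset context now adds a datasetId clause to every feature-vector list query. A hedged sketch of the equivalent explicit filter (FiltersResource.FEATURE and the entity names here are assumptions based on the public dtlpy API, not taken from this diff):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')       # hypothetical name
    dataset = project.datasets.get(dataset_name='my-dataset')  # hypothetical name

    # what the dataset-scoped repository now does implicitly on _list()
    filters = dl.Filters(resource=dl.FiltersResource.FEATURE)
    filters.add(field='datasetId', values=dataset.id)
    filters.context = {'projects': [project.id]}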
dtlpy/repositories/integrations.py
CHANGED
@@ -1,7 +1,8 @@
 """
 Integrations Repository
 """
-
+import base64
+import json
 import logging
 from .. import entities, exceptions, miscellaneous, _api_reference
 from ..services.api_client import ApiClient
@@ -298,3 +299,53 @@ class Integrations:

         available_integrations = miscellaneous.List(response.json())
         return available_integrations
+
+    def _create_private_registry_gar(self, service_account: str, location: str):
+        password = self.__create_gar_password(service_account, location)
+        return self.create(
+            integrations_type='private-registry',
+            name='gar-1',
+            metadata={"provider": "gcp"},
+            options={
+                "name": "_json_key",
+                "spec": {
+                    "password": password
+                }
+            }
+        )
+
+    def __create_gar_password(self, service_account: str, location: str) -> str:
+        """
+        Generates a Google Artifact Registry JSON configuration and returns it as a base64-encoded string.
+
+        Parameters:
+            location (str): The region where the repository will be created (e.g., 'us-central1').
+            service_account (str): The service_account parameter represents the Google Cloud service account credentials
+                in the form of a JSON key file. This JSON contains the private key and other metadata
+                required for authenticating with Google Artifact Registry. It is used to generate a Kubernetes secret
+                that stores the credentials for pulling container images from the registry.
+                The JSON key must include fields such as client_email, private_key, and project_id,
+                and it is typically downloaded from the Google Cloud Console when creating the service account
+
+        Returns:
+            str: A base64-encoded string representation of the repository JSON configuration.
+        """
+        if not service_account:
+            raise ValueError('Missing Service Account')
+        if not location:
+            raise ValueError('Missing Location')
+        user_name = "_json_key"
+        cred = f"{user_name}:{service_account}"
+        auth = str(base64.b64encode(bytes(cred, 'utf-8')))[2:-1]
+
+        encoded_pass = {
+            "auths": {
+                f"{location}": {
+                    "username": user_name,
+                    "password": service_account,
+                    "auth": auth
+                }
+            }
+        }
+
+        return str(base64.b64encode(bytes(json.dumps(encoded_pass), 'utf-8')))[2:-1]
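The new __create_gar_password helper builds a standard Docker config.json-style auth blob and base64-encodes it twice (once for the inner auth field, once for the whole document); note that str(base64.b64encode(...))[2:-1] is just a roundabout .decode('utf-8'). A standalone reproduction of the same encoding with placeholder credentials:

    import base64
    import json

    # placeholders; a real service_account is the full JSON key file content
    service_account = '{"client_email": "...", "private_key": "...", "project_id": "..."}'
    location = 'us-central1-docker.pkg.dev'  # hypothetical registry host

    user_name = '_json_key'
    auth = base64.b64encode(f'{user_name}:{service_account}'.encode('utf-8')).decode('utf-8')
    docker_config = {
        'auths': {
            location: {
                'username': user_name,
                'password': service_account,
                'auth': auth,
            }
        }
    }
    # the value that lands in the integration's options.spec.password
    password = base64.b64encode(json.dumps(docker_config).encode('utf-8')).decode('utf-8')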
dtlpy/repositories/items.py
CHANGED
@@ -271,7 +271,7 @@ class Items:
         filters.pop(field='hidden')
         if is_dir:
             filters.add(field='type', values='dir')
-
+            filters.recursive = False
         filters.add(field='filename', values=filepath)
         paged_entity = self.list(filters=filters)
         if len(paged_entity.items) == 0:
@@ -610,7 +610,9 @@ class Items:
                output_entity=entities.Item,
                no_output: bool = False,
                export_version: str = entities.ExportVersion.V1,
-               item_description: str = None
+               item_description: str = None,
+               raise_on_error: bool = False,
+               return_as_list: bool = False
                ):
         """
         Upload local file to dataset.
@@ -630,6 +632,9 @@ class Items:
         :param bool no_output: do not return the items after upload
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :param str item_description: add a string description to the uploaded item
+        :param bool raise_on_error: raise an exception if an error occurs
+        :param bool return_as_list: return a list of items instead of a generator
+
         :return: Output (generator/single item)
         :rtype: generator or single item

@@ -657,7 +662,9 @@ class Items:
             # metadata to upload with items
             item_metadata=item_metadata,
             export_version=export_version,
-            item_description=item_description
+            item_description=item_description,
+            raise_on_error=raise_on_error,
+            return_as_list=return_as_list
         )

     @property
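A short sketch of the two new upload flags (dataset ID and paths hypothetical): return_as_list=True materializes the result instead of returning a generator, and raise_on_error=True turns per-file upload failures into a PlatformException (implemented in the uploader.py hunk further below):

    import dtlpy as dl

    dataset = dl.datasets.get(dataset_id='my-dataset-id')  # hypothetical ID

    # always get a list back, even for a folder upload
    items = dataset.items.upload(local_path='/path/to/images', return_as_list=True)

    # fail loudly instead of only logging an error report
    try:
        dataset.items.upload(local_path='/path/to/images', raise_on_error=True)
    except dl.exceptions.PlatformException as err:
        print(f'upload failed: {err}')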
dtlpy/repositories/pipelines.py
CHANGED
@@ -376,24 +376,18 @@ class Pipelines:

             pipeline = project.pipelines.update(pipeline='pipeline_entity')
         """
-        if pipeline.status == entities.CompositionStatus.INSTALLED and not pipeline.variables_changed():
-            raise exceptions.PlatformException(
-                error='400',
-                message='Cannot update pipeline while it is installed'
-            )
         # payload
         payload = pipeline.to_json()

         # update settings
         if pipeline.settings_changed():
-            self.update_settings(pipeline=pipeline, settings=pipeline.settings)
+            new_pipeline = self.update_settings(pipeline=pipeline, settings=pipeline.settings)
+            payload['settings'] = new_pipeline.to_json().get('settings', payload.get('settings'))

         # update variables
         if pipeline.variables_changed():
             new_pipeline = self.__update_variables(pipeline=pipeline)
-
-            logger.warning('Pipeline is installed, updating pipeline variables only')
-            return new_pipeline
+            payload['variables'] = new_pipeline.to_json().get('variables', payload.get('variables'))

         success, response = self._client_api.gen_request(
             req_type='patch',
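Behaviorally, Pipelines.update no longer rejects an installed pipeline outright: changed settings and variables are pushed through their dedicated update calls and the results are folded back into the patch payload. A hedged usage sketch (project, pipeline, and variable names are hypothetical):

    import dtlpy as dl

    project = dl.projects.get(project_name='my-project')
    pipeline = project.pipelines.get(pipeline_name='my-pipeline')

    # mutate a variable on the local entity; update() now applies it even when
    # the pipeline is installed, instead of raising a 400
    for variable in pipeline.variables:
        if variable.name == 'confidence_threshold':  # hypothetical variable
            variable.value = 0.8
    pipeline = pipeline.update()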
dtlpy/repositories/uploader.py
CHANGED
@@ -59,7 +59,9 @@ class Uploader:
                overwrite=False,
                item_metadata=None,
                export_version: str = entities.ExportVersion.V1,
-               item_description=None
+               item_description=None,
+               raise_on_error=False,
+               return_as_list=False
                ):
         """
         Upload local file to dataset.
@@ -67,7 +69,7 @@ class Uploader:
         If `*` at the end of local_path (e.g. '/images/*') items will be uploaded without head directory

         :param local_path: local file or folder to upload
-        :param local_annotations_path: path to
+        :param local_annotations_path: path to Dataloop format annotations json files.
         :param remote_path: remote path to save.
         :param remote_name: remote base name to save.
         :param file_types: list of file type to upload. e.g ['.jpg', '.png']. default is all
@@ -75,6 +77,8 @@ class Uploader:
         :param item_metadata: upload the items with the metadata dictionary
         :param str export_version: exported items will have original extension in filename, `V1` - no original extension in filenames
         :param str item_description: add a string description to the uploaded item
+        :param bool raise_on_error: raise an exception if an error occurs
+        :param bool return_as_list: always return a list of items

         :return: Output (list)
         """
@@ -114,13 +118,21 @@ class Uploader:
         if log_filepath is not None:
             logger.warning("Errors in {n_error} files. See {log_filepath} for full log".format(
                 n_error=errors_count, log_filepath=log_filepath))
-
-
+        if raise_on_error is True:
+            raise PlatformException(error="400",
+                                    message=f"Errors in {errors_count} files. See above trace for more information")
+
+        if return_as_list is True:
+            # return list of items
+            return list(self.reporter.output)
         if len(status_list) == 1:
+            # if there is only one item, return it
             try:
                 return next(self.reporter.output)
             except StopIteration:
+                # if there is no items, return None
                 return None
+        # if there are multiple items, return the generator
         return self.reporter.output

     def _build_elements_from_inputs(self,
dtlpy/services/api_client.py
CHANGED
@@ -45,7 +45,7 @@ threadLock = threading.Lock()


 def format_message(message):
-    if message:
+    if message and isinstance(message, str):
         return message.replace('\\n', '\n')
     return message

@@ -1476,7 +1476,7 @@ class ApiClient:
         msg += '[Response <{val}>]'.format(val=resp.status_code)
         if hasattr(resp, 'reason'):
             msg += '[Reason: {val}]'.format(val=resp.reason)
-        if hasattr(resp, 'text'):
+        if hasattr(resp, 'text') and isinstance(resp.text, str):
             msg += '[Text: {val}]'.format(val=format_message(resp.text))

         request_id = resp.headers.get('x-request-id', 'na')
{dtlpy-1.102.14.dist-info → dtlpy-1.104.14.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dtlpy
-Version: 1.102.14
+Version: 1.104.14
 Summary: SDK and CLI for Dataloop platform
 Home-page: https://github.com/dataloop-ai/dtlpy
 Author: Dataloop Team
@@ -26,7 +26,7 @@ Requires-Dist: requests-toolbelt (>=1.0.0)
 Requires-Dist: requests (>=2.25.0)
 Requires-Dist: numpy (>=1.16.2)
 Requires-Dist: pandas (>=0.24.2)
-Requires-Dist: tabulate (
+Requires-Dist: tabulate (>=0.8.9)
 Requires-Dist: Pillow (>=7.2)
 Requires-Dist: PyJWT (>=2.4)
 Requires-Dist: jinja2 (>=2.11.3)