dtlpy 1.111.11__py3-none-any.whl → 1.112.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dtlpy/__version__.py +1 -1
- dtlpy/entities/integration.py +14 -7
- dtlpy/entities/paged_entities.py +80 -13
- dtlpy/ml/base_model_adapter.py +8 -9
- dtlpy/repositories/computes.py +14 -8
- dtlpy/repositories/downloader.py +3 -0
- dtlpy/repositories/drivers.py +11 -1
- dtlpy/repositories/features.py +1 -1
- dtlpy/repositories/integrations.py +33 -5
- {dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/METADATA +1 -1
- {dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/RECORD +19 -19
- tests/features/environment.py +2 -2
- {dtlpy-1.111.11.data → dtlpy-1.112.9.data}/scripts/dlp +0 -0
- {dtlpy-1.111.11.data → dtlpy-1.112.9.data}/scripts/dlp.bat +0 -0
- {dtlpy-1.111.11.data → dtlpy-1.112.9.data}/scripts/dlp.py +0 -0
- {dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/LICENSE +0 -0
- {dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/WHEEL +0 -0
- {dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/top_level.txt +0 -0
dtlpy/__version__.py
CHANGED
@@ -1 +1 @@
-version = '1.111.11'
+version = '1.112.9'
dtlpy/entities/integration.py
CHANGED
@@ -111,9 +111,12 @@ class Integration(entities.BaseEntity):
             raise ValueError('Must input a valid Project entity')
         self._project = project

-    def update(self,
-               new_name: str = None,
-               new_options: dict = None):
+    def update(
+            self,
+            new_name: str = None,
+            new_options: dict = None,
+            reload_services: bool = None
+    ):
         """
         Update the integration's name.

@@ -121,6 +124,7 @@ class Integration(entities.BaseEntity):

         :param str new_name: new name
         :param dict new_options: new value
+        :param bool reload_services: reload services associated with this integration
         :return: Integration object
         :rtype: dtlpy.entities.integration.Integration

@@ -148,10 +152,13 @@ class Integration(entities.BaseEntity):
                 error='400',
                 message='Must provide an identifier in inputs')

-        identifier.integrations.update(new_name=new_name,
-                                       integrations_id=self.id,
-                                       integration=self,
-                                       new_options=new_options)
+        identifier.integrations.update(
+            new_name=new_name,
+            integrations_id=self.id,
+            integration=self,
+            new_options=new_options,
+            reload_services=reload_services
+        )

     def delete(self,
                sure: bool = False,
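The entity-level update() forwards the new flag straight to Integrations.update. A minimal usage sketch of reload_services (the project name and integration ID below are placeholders, not values taken from this diff):

import dtlpy as dl

# placeholders: an existing project and integration are assumed
project = dl.projects.get(project_name='my-project')
integration = project.integrations.get(integrations_id='integration-id')

# rename the integration and ask the platform to reload the FaaS services
# that consume it, so they pick up the change without a manual redeploy
integration.update(new_name='renamed-integration', reload_services=True)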
dtlpy/entities/paged_entities.py
CHANGED
@@ -6,6 +6,8 @@ import copy
 import sys

 import attr
+
+from .filters import FiltersOperations, FiltersOrderByDirection, FiltersResource
 from .. import miscellaneous
 from ..services.api_client import ApiClient

@@ -29,6 +31,10 @@ class PagedEntities:
     total_pages_count = attr.ib(default=0)
     items_count = attr.ib(default=0)

+    # hybrid pagination
+    use_id_based_paging = attr.ib(default=False)
+    last_seen_id = attr.ib(default=None)
+
     # execution attribute
     _service_id = attr.ib(default=None, repr=False)
     _project_id = attr.ib(default=None, repr=False)
@@ -43,6 +49,15 @@ class PagedEntities:
     # items list
     items = attr.ib(default=miscellaneous.List(), repr=False)

+    @staticmethod
+    def _has_explicit_sort(flt):
+        """
+        Check if the filter has custom sort fields defined (not id/createdAt).
+        """
+        prepared = flt.prepare() if flt else {}
+        sort_fields = list(prepared.get("sort", {}).keys())
+        return bool(sort_fields and sort_fields[0] not in {"id", "createdAt"})
+
     def process_result(self, result):
         """
         :param result: json object
@@ -71,7 +86,8 @@ class PagedEntities:
         return self.items_count

     def __iter__(self):
-        pbar = tqdm.tqdm(total=self.total_pages_count,
+        pbar = tqdm.tqdm(total=self.total_pages_count,
+                         disable=self._client_api.verbose.disable_progress_bar_iterate_pages,
                          file=sys.stdout, desc="Iterate Pages")
         if self.page_offset != 0:
             # reset the count for page 0
@@ -109,18 +125,68 @@ class PagedEntities:
         if page_offset is None:
             page_offset = self.page_offset

-        if self.filters is not None:
-
-
-
-
-
+        if self.filters is None:
+            raise ValueError("Cant return page. Filters is empty")
+
+        req = copy.deepcopy(self.filters)
+        req.page_size = page_size
+
+        after_id = getattr(req, "after_id", None)
+        if after_id is not None:
+            delattr(req, "after_id")
+
+        enable_hybrid = getattr(self.filters, "resource", None) in [
+            FiltersResource.ITEM,
+            FiltersResource.ANNOTATION,
+            FiltersResource.FEATURE,
+        ]
+
+        if enable_hybrid and not self._has_explicit_sort(req):
+            req.sort_by(field="id", value=FiltersOrderByDirection.ASCENDING)
+
+        if enable_hybrid and self.use_id_based_paging:
+            req.page = 0
+            if self.last_seen_id:
+                req.add(
+                    field="id",
+                    values=self.last_seen_id,
+                    operator=FiltersOperations.GREATER_THAN,
+                    method=FiltersOperations.AND,
+                )
+        else:
+            auto_hybrid = (
+                enable_hybrid
+                and not self.use_id_based_paging
+                and not self._has_explicit_sort(self.filters)
+                and self.last_seen_id is not None
+            )
+            if auto_hybrid and page_offset > 0:
+                req.page = 0
+                req.add(
+                    field="id",
+                    values=after_id or self.last_seen_id,
+                    operator=FiltersOperations.GREATER_THAN,
+                    method=FiltersOperations.AND,
+                )
+                self.use_id_based_paging = True
             else:
-
-
-
+                req.page = page_offset
+
+        if self._list_function is None:
+            result = self.items_repository._list(filters=req)
         else:
-
+            result = self._list_function(filters=req)
+
+        items = self.process_result(result)
+
+        if enable_hybrid and items and hasattr(items[-1], "id"):
+            self.last_seen_id = items[-1].id
+
+        if self.use_id_based_paging:
+            if "hasNextPage" not in result:
+                self.has_next_page = len(items) == page_size
+
+        return items

     def get_page(self, page_offset=None, page_size=None):
         """
@@ -164,7 +230,8 @@ class PagedEntities:
     def all(self):
         page_offset = 0
         page_size = 100
-        pbar = tqdm.tqdm(total=self.items_count,
+        pbar = tqdm.tqdm(total=self.items_count,
+                         disable=self._client_api.verbose.disable_progress_bar,
                          file=sys.stdout, desc='Iterate Entity')
         total_pages = math.ceil(self.items_count / page_size)
         jobs = list()
@@ -192,4 +259,4 @@ class PagedEntities:
         self.items.print(columns=columns)

     def to_df(self, columns=None):
-        return self.items.to_df(columns=columns)
+        return self.items.to_df(columns=columns)
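The hybrid (id-based) pagination is applied inside PagedEntities itself, so existing iteration code does not change: for item, annotation and feature queries without a custom sort, deep pages are fetched with an "id greater than last seen id" filter instead of a growing page offset. A sketch of the usual iteration pattern that benefits from this (project and dataset names are placeholders):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')
dataset = project.datasets.get(dataset_name='my-dataset')

filters = dl.Filters(resource=dl.FiltersResource.ITEM)
pages = dataset.items.list(filters=filters)  # returns a PagedEntities object

for page in pages:       # page fetching may switch to id-based paging under the hood
    for item in page:
        print(item.name)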
dtlpy/ml/base_model_adapter.py
CHANGED
@@ -313,10 +313,9 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
         self.logger.debug("Downloading subset {!r} of {}".format(subset,
                                                                  self.model_entity.dataset.name))

-        annotation_filters =
-
-
+        annotation_filters = None
         if self.model_entity.output_type is not None and self.model_entity.output_type != "embedding":
+            annotation_filters = entities.Filters(resource=entities.FiltersResource.ANNOTATION, use_defaults=False)
             if self.model_entity.output_type in [entities.AnnotationType.SEGMENTATION,
                                                  entities.AnnotationType.POLYGON]:
                 model_output_types = [entities.AnnotationType.SEGMENTATION, entities.AnnotationType.POLYGON]
@@ -329,12 +328,12 @@ class BaseModelAdapter(utilities.BaseServiceRunner):
                     operator=entities.FiltersOperations.IN
                 )

-
-
-
-
-
-
+            if not self.configuration.get("include_model_annotations", False):
+                annotation_filters.add(
+                    field="metadata.system.model.name",
+                    values=False,
+                    operator=entities.FiltersOperations.EXISTS
+                )

         ret_list = dataset.items.download(filters=filters,
                                           local_path=data_subset_base_path,
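With this change the adapter excludes model-created annotations from downloaded subsets unless the model configuration enables include_model_annotations. A hedged sketch of toggling that flag (project and model names are placeholders):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')
model = project.models.get(model_name='my-model')   # placeholder model

# keep annotations produced by models in the downloaded training/validation subsets
model.configuration['include_model_annotations'] = True
model.update()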
dtlpy/repositories/computes.py
CHANGED
@@ -133,25 +133,31 @@ class Computes:
     def __get_log_compute_progress_callback(self, compute_id: str):
         def func():
             compute = self.get(compute_id=compute_id)
-            bootstrap_progress = compute.metadata.get('system', {}).get('
-            bootstrap_logs = compute.metadata.get('system', {}).get('
+            bootstrap_progress = compute.metadata.get('system', {}).get('bootstrap', {}).get('progress', None)
+            bootstrap_logs = compute.metadata.get('system', {}).get('bootstrap', {}).get('logs', None)
             validation_progress = compute.metadata.get('system', {}).get('validation', {}).get('progress', None)
             validation_logs = compute.metadata.get('system', {}).get('validation', {}).get('logs', None)
-            if bootstrap_progress not
-
+            if bootstrap_progress is not None:
+                if 'bootstrap' not in self.log_cache.get(compute_id, {}):
+                    logger.info(f"Bootstrap in progress:")
                 last_index = len(self.log_cache.get(compute_id, {}).get('bootstrap', []))
                 new_logs = bootstrap_logs[last_index:]
                 if new_logs:
-
+                    for log in new_logs:
+                        logger.info(log)
+                    logger.info(f'Bootstrap progress: {int(bootstrap_progress)}%')
                 if compute_id not in self.log_cache:
                     self.log_cache[compute_id] = {}
                 self.log_cache[compute_id]['bootstrap'] = bootstrap_logs
-            if
-
+            if bootstrap_progress in [100, None] and validation_progress is not None:
+                if 'validation' not in self.log_cache.get(compute_id, {}):
+                    logger.info(f"Validating created compute:")
                 last_index = len(self.log_cache.get(compute_id, {}).get('validation', []))
                 new_logs = validation_logs[last_index:]
                 if new_logs:
-
+                    for log in new_logs:
+                        logger.info(log)
+                    logger.info(f'Validation progress: {int(validation_progress)}%')
                 if compute_id not in self.log_cache:
                     self.log_cache[compute_id] = {}
                 self.log_cache[compute_id]['validation'] = validation_logs
dtlpy/repositories/downloader.py
CHANGED
@@ -96,6 +96,9 @@ class Downloader:
                 error='400',
                 message='Unknown annotation download option: {}, please choose from: {}'.format(
                     ann_option, list(entities.ViewAnnotationOptions)))
+        # normalize items argument: treat empty list as "no items specified"
+        if isinstance(items, list) and len(items) == 0:
+            items = None
         #####################
         # items to download #
         #####################
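The normalization above means an items argument that ends up as an empty list is treated the same as not passing items at all. A small sketch of the calling pattern this affects, assuming the items keyword is exposed unchanged through dataset.items.download (dataset name and local path are placeholders):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')
dataset = project.datasets.get(dataset_name='my-dataset')

items_to_get = []   # e.g. built dynamically and possibly empty
dataset.items.download(items=items_to_get, local_path='/tmp/my-dataset')
# with items == [], the downloader now behaves as if items were omitted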
dtlpy/repositories/drivers.py
CHANGED
@@ -1,4 +1,5 @@
 import logging
+import re

 from .. import entities, miscellaneous, exceptions, _api_reference
 from ..services.api_client import ApiClient
@@ -151,7 +152,8 @@ class Drivers:
                allow_external_delete: bool = True,
                region: str = None,
                storage_class: str = "",
-               path: str = ""):
+               path: str = "",
+               endpoint: str = None):
         """
         Create a storage driver.

@@ -167,6 +169,7 @@ class Drivers:
         :param str region: relevant only for s3 - the bucket region
         :param str storage_class: relevant only for s3
         :param str path: Optional. By default path is the root folder. Path is case sensitive integration
+        :param endpoint path: Optional. Custom endpoint for S3 storage. Must be in the format 'http://<hostname>:<port>' or 'https://<hostname>:<port>'.
         :return: driver object
         :rtype: dtlpy.entities.driver.Driver

@@ -185,6 +188,11 @@ class Drivers:
         integration_type = driver_type
         if driver_type == entities.ExternalStorage.S3:
             bucket_payload = 'bucketName'
+            if endpoint:
+                if not re.match(r'^https?://[A-Za-z0-9.-]+:\d+$', endpoint):
+                    raise ValueError(
+                        f"Invalid endpoint URL '{endpoint}'. Must be 'http://<hostname>:<port>' or 'https://<hostname>:<port>'."
+                    )
         elif driver_type == entities.ExternalStorage.GCS:
             bucket_payload = 'bucket'
         else:
@@ -208,6 +216,8 @@ class Drivers:
             "allowExternalDelete": allow_external_delete,
             "creator": self._client_api.info().get('user_email')
         }
+        if endpoint and driver_type == entities.ExternalStorage.S3:
+            payload['payload']['endpoint'] = endpoint

         success, response = self._client_api.gen_request(req_type='post',
                                                          path='/drivers',
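The new endpoint argument lets an S3 driver point at an S3-compatible service (for example a self-hosted MinIO); it is validated against the http(s)://<hostname>:<port> pattern and forwarded in the driver payload. A hedged creation sketch (the driver name, integration ID, bucket and URL below are placeholders):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')

driver = project.drivers.create(
    name='minio-driver',
    driver_type=dl.ExternalStorage.S3,
    integration_id='integration-id',         # an existing S3-type integration
    bucket_name='my-bucket',
    region='eu-west-1',
    endpoint='http://minio.internal:9000',   # must match http(s)://<hostname>:<port>
)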
dtlpy/repositories/features.py
CHANGED
@@ -111,7 +111,7 @@ class Features:
         if self._project_id is None:
             self._project_id = self.project.id
         filters.context = {"projects": [self._project_id]}
-
+
         paged = entities.PagedEntities(items_repository=self,
                                        filters=filters,
                                        page_offset=filters.page,

dtlpy/repositories/integrations.py
CHANGED
@@ -120,8 +120,10 @@ class Integrations:
         aws-cross - {}
         gcp-cross - {}
         gcp-workload-identity-federation - {"secret": "", "content": "{}", "clientId": ""}
-        private-registry (ECR) -
-        private-registry (GAR) -
+        private-registry (ECR) - can use generate_ecr_options to generate the options
+        private-registry (GAR) - use generate_gar_options to generate the options
+        private-registry (ACR) - use generate_azure_container_registry_options to generate the options
+        private-registry (DockerHub) - use generate_docker_hub_options to generate the options

         **Prerequisites**: You must be an *owner* in the organization.

@@ -180,6 +182,7 @@ class Integrations:
                integration: entities.Integration = None,
                new_options: dict = None,
                organization_id: str = None,
+               reload_services: bool = None,
                ):
         """
         Update the integration's name.
@@ -191,6 +194,7 @@ class Integrations:
         :param Integration integration: integration object
         :param dict new_options: new value
         :param str organization_id: organization id
+        :param bool reload_services: reload services associated with this integration
         :return: Integration object
         :rtype: dtlpy.entities.integration.Integration

@@ -225,7 +229,16 @@ class Integrations:
         else:
             organization_id = self.org.id

-
+        if reload_services is None:
+            logger.warning(
+                "Param reload_services was not provided. If the integration you are updating is used\n"
+                "in FaaS services these services will keep using the old value until updated."
+            )
+
+        url_path = '/orgs/{org_id}/integrations{query_params}'.format(
+            org_id=organization_id,
+            query_params='?reloadServices=true' if reload_services else ''
+        )
         payload = dict(integrationId=integrations_id if integrations_id is not None else integration.id)
         if new_name is not None:
             payload['name'] = new_name
@@ -355,6 +368,21 @@ class Integrations:
         """
         return IntegrationUtils.generate_docker_hub_options(username=username, password=password, email=email)

+    @staticmethod
+    def generate_azure_container_registry_options(username: str, password: str, location: str) -> dict:
+        """
+        Generates an Azure Container Registry JSON configuration and returns it as a base64-encoded string.
+
+        Parameters:
+            username (str): The Azure username.
+            password (str): The Azure password.
+            location (str): server URL of Azure Container Registry
+
+        Returns:
+            str: A base64-encoded string representation of the repository JSON configuration.
+        """
+        return IntegrationUtils.generate_docker_hub_options(username=username, password=password, location=location)
+
     @staticmethod
     def generate_ecr_options(access_key_id: str, secret_access_key: str, account: str, region: str) -> dict:
         """
@@ -426,7 +454,7 @@ class IntegrationUtils:
         )

     @staticmethod
-    def generate_docker_hub_options(username: str, password: str, email: str = None) -> dict:
+    def generate_docker_hub_options(username: str, password: str, email: str = None, location='docker.io') -> dict:

         if not username:
             raise ValueError('Missing Username')
@@ -436,7 +464,7 @@ class IntegrationUtils:
         auth = IntegrationUtils.encode('{}:{}'.format(username, password))

         return IntegrationUtils.generate_json_key_options(
-            location='docker.io',
+            location=location,
             username=username,
             password=password,
             auth=auth,
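Together with the new ACR helper, the reload_services flag lets a rotated registry secret propagate to running services. A hedged sketch, assuming an existing private-registry integration reachable through the project (names, credentials, registry URL and the integration ID are placeholders):

import dtlpy as dl

project = dl.projects.get(project_name='my-project')

# helper added in this version: base64-encoded options for an Azure Container Registry
acr_options = project.integrations.generate_azure_container_registry_options(
    username='acr-user',
    password='acr-password',
    location='myregistry.azurecr.io',
)

# update the existing integration and ask dependent FaaS services to reload it
project.integrations.update(
    integrations_id='integration-id',
    new_options=acr_options,
    reload_services=True,
)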
{dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 dtlpy/__init__.py,sha256=-5fpi-yAwFdluh8QZ-sWXwNDCD97Q5BCgIs7pUDl04o,20444
-dtlpy/__version__.py,sha256=
+dtlpy/__version__.py,sha256=XIkrZL_C-sKZYUlGAHoBjRZoQrldqJ58ORqW_bv34Rg,20
 dtlpy/exceptions.py,sha256=EQCKs3pwhwZhgMByQN3D3LpWpdxwcKPEEt-bIaDwURM,2871
 dtlpy/new_instance.py,sha256=tUCzBGaSpm9GTjRuwOkFgo3A8vopUQ-baltdJss3XlI,9964
 dtlpy/assets/__init__.py,sha256=D_hAa6NM8Zoy32sF_9b7m0b7I-BQEyBFg8-9Tg2WOeo,976
@@ -66,7 +66,7 @@ dtlpy/entities/feature.py,sha256=9fFjD0W57anOVSAVU55ypxN_WTCsWTG03Wkc3cAAj78,373
 dtlpy/entities/feature_set.py,sha256=niw4MkmrDbD_LWQu1X30uE6U4DCzmFhPTaYeZ6VZDB0,4443
 dtlpy/entities/filters.py,sha256=Cdx3BzYa8kIfvW37Gmmwiu4eH4ytfWByu8TQOBvtR2o,22644
 dtlpy/entities/gis_item.py,sha256=Uk-wMBxwcHsImjz4qOjP-EyZAohbRzN43kMpCaVjCXU,3982
-dtlpy/entities/integration.py,sha256=
+dtlpy/entities/integration.py,sha256=XraOApW9jbT6EdZraRX2In6sMbfNgEGf2V5Um2RCRqA,6001
 dtlpy/entities/item.py,sha256=WCIPHUmubIe0wva-YMm-LPQdn2S3_-Q151x49C9NEw8,34591
 dtlpy/entities/label.py,sha256=ycDYavIgKhz806plIX-64c07_TeHpDa-V7LnfFVe4Rg,3869
 dtlpy/entities/links.py,sha256=FAmEwHtsrqKet3c0UHH9u_gHgG6_OwF1-rl4xK7guME,2516
@@ -80,7 +80,7 @@ dtlpy/entities/package_defaults.py,sha256=wTD7Z7rGYjVy8AcUxTFEnkOkviiJaLVZYvduiU
 dtlpy/entities/package_function.py,sha256=M42Kvw9A8b6msAkv-wRNAQg_-UC2bejniCjeKDugudc,6314
 dtlpy/entities/package_module.py,sha256=cOkIITATkzzCQpE0sdPiBUisAz8ImlPG2YGZ0K7SypA,5151
 dtlpy/entities/package_slot.py,sha256=XBwCodQe618sQm0bmx46Npo94mEk-zUV7ZX0mDRcsD8,3946
-dtlpy/entities/paged_entities.py,sha256=
+dtlpy/entities/paged_entities.py,sha256=ffw0CbLcOTNDYLQA9gqmjSaTZLRYP_tMnSfa_BmGIyk,8145
 dtlpy/entities/pipeline.py,sha256=JtWGoCUhVszOVkBNK43fbTt446fkND4wH-Y-fN_llww,20851
 dtlpy/entities/pipeline_execution.py,sha256=EQhW4W_G1bIPShYbJSAT--1WNQuvxVQbcQ_MCHIX0KI,9938
 dtlpy/entities/project.py,sha256=ZUx8zA3mr6N145M62R3UDPCCzO1vxfyWO6vjES-bO-g,14653
@@ -149,7 +149,7 @@ dtlpy/miscellaneous/list_print.py,sha256=fBGTMXFUwDG8DD4W6HyR8BTGtbTckLf4W09quNR
 dtlpy/miscellaneous/zipping.py,sha256=JplTc8UDFvO8WaD5vKuumVLN0lU_-GtHoE0doWKtmKg,5383
 dtlpy/ml/__init__.py,sha256=vPkyXpc9kcWWZ_PxyPEOsjKBJdEbowLkZr8FZIb_OBM,799
 dtlpy/ml/base_feature_extractor_adapter.py,sha256=iiEGYAx0Rdn4K46H_FlKrAv3ebTXHSxNVAmio0BxhaI,1178
-dtlpy/ml/base_model_adapter.py,sha256=
+dtlpy/ml/base_model_adapter.py,sha256=E7OktF1WbquvgyZixvPkyq7QW0ID3VF9tevXlwpmnuY,51216
 dtlpy/ml/metrics.py,sha256=BG2E-1Mvjv2e2No9mIJKVmvzqBvLqytKcw3hA7wVUNc,20037
 dtlpy/ml/predictions_utils.py,sha256=He_84U14oS2Ss7T_-Zj5GDiBZwS-GjMPURUh7u7DjF8,12484
 dtlpy/ml/summary_writer.py,sha256=dehDi8zmGC1sAGyy_3cpSWGXoGQSiQd7bL_Thoo8yIs,2784
@@ -165,15 +165,15 @@ dtlpy/repositories/codebases.py,sha256=pvcZxdrq0-zWysVbdXjUOhnfcF6hJD8v5VclNZ-zh
 dtlpy/repositories/collections.py,sha256=z-nkR33rq-MzkEff7DDSBlfsI_lkCDFwQZIlMaIT5rM,13514
 dtlpy/repositories/commands.py,sha256=MgXhXxbAzBa2QJM9Z5EsQZRaZ4fGBM17ALoldxi8xYA,5848
 dtlpy/repositories/compositions.py,sha256=H417BvlQAiWr5NH2eANFke6CfEO5o7DSvapYpf7v5Hk,2150
-dtlpy/repositories/computes.py,sha256=
+dtlpy/repositories/computes.py,sha256=V8kVTwXc5lhxrp5e7zxTXvKcVKtg6crCqkL5zQHtKZo,14639
 dtlpy/repositories/datasets.py,sha256=p0HBbTGrxAQ8h9tJsp1jRasPbwnMAtXQ4_sIef9_590,59358
-dtlpy/repositories/downloader.py,sha256=
+dtlpy/repositories/downloader.py,sha256=X5-vspCoTW7_QZuPdaZgOSTvM7jYU0Uf7o5PELZNY9g,45329
 dtlpy/repositories/dpks.py,sha256=dxZpGloZGH6MJG9ZFff5l3GlXw6i-52n9kxL-QiHosQ,18516
-dtlpy/repositories/drivers.py,sha256=
+dtlpy/repositories/drivers.py,sha256=2fMzzt0ovNeYpfrAOqz4h14C5D7GCLLA5SDj9rQ4UfI,10817
 dtlpy/repositories/executions.py,sha256=4UoU6bnB3kl5cMuF1eJvDecfZCaB06gKWxPfv6_g1_k,32598
 dtlpy/repositories/feature_sets.py,sha256=UowMDAl_CRefRB5oZzubnsjU_OFgiPPdQXn8q2j4Kuw,9666
-dtlpy/repositories/features.py,sha256=
-dtlpy/repositories/integrations.py,sha256=
+dtlpy/repositories/features.py,sha256=HZR-sLSdwiWdbFsnuZrTDSff0oRK2hwFBQ6UK2yVAvk,9923
+dtlpy/repositories/integrations.py,sha256=Y5c37fQCaIkw1p5jPEbAqytgRVXuqe771eHC1hNDE7A,19491
 dtlpy/repositories/items.py,sha256=S1OWZ6s8AbVXMiLtCfBBiYPMG8OLqdUhKMHuZWE3bnU,40029
 dtlpy/repositories/messages.py,sha256=QU0Psckg6CA_Tlw9AVxqa-Ay1fRM4n269sSIJkH9o7E,3066
 dtlpy/repositories/models.py,sha256=uYVw319dMgVoXReb9VKl0b3v0_kgetROQaf56cvgwqs,38297
@@ -226,19 +226,19 @@ dtlpy/utilities/reports/report.py,sha256=3nEsNnIWmdPEsd21nN8vMMgaZVcPKn9iawKTTeO
 dtlpy/utilities/videos/__init__.py,sha256=SV3w51vfPuGBxaMeNemx6qEMHw_C4lLpWNGXMvdsKSY,734
 dtlpy/utilities/videos/video_player.py,sha256=LCxg0EZ_DeuwcT7U_r7MRC6Q19s0xdFb7x5Gk39PRms,24072
 dtlpy/utilities/videos/videos.py,sha256=Dj916B4TQRIhI7HZVevl3foFrCsPp0eeWwvGbgX3-_A,21875
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+dtlpy-1.112.9.data/scripts/dlp,sha256=-F0vSCWuSOOtgERAtsPMPyMmzitjhB7Yeftg_PDlDjw,10
+dtlpy-1.112.9.data/scripts/dlp.bat,sha256=QOvx8Dlx5dUbCTMpwbhOcAIXL1IWmgVRSboQqDhIn3A,37
+dtlpy-1.112.9.data/scripts/dlp.py,sha256=ZpfJvYE1_OTSorEYBphqTOutnHSb5TqOXh0y_mUCTJs,4393
 tests/assets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tests/assets/models_flow/failedmain.py,sha256=n8F4eu_u7JPrJ1zedbJPvv9e3lHb3ihoErqrBIcseEc,1847
 tests/assets/models_flow/main.py,sha256=vnDKyVZaae2RFpvwS22Hzi6Dt2LJerH4yQrmKtaT8_g,2123
 tests/assets/models_flow/main_model.py,sha256=Hl_tv7Q6KaRL3yLkpUoLMRqu5-ab1QsUYPL6RPEoamw,2042
 tests/features/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-tests/features/environment.py,sha256=
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
-dtlpy-1.
+tests/features/environment.py,sha256=ZZNSN8TObnNMkX0IQhSolAs_9I_V9hHFL_IZjG0jrGU,18909
+dtlpy-1.112.9.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+dtlpy-1.112.9.dist-info/METADATA,sha256=2C1bQeEFV6GqQlo05c8mbX3WL-lWjZbydg5CkrP739I,5469
+dtlpy-1.112.9.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+dtlpy-1.112.9.dist-info/entry_points.txt,sha256=C4PyKthCs_no88HU39eioO68oei64STYXC2ooGZTc4Y,43
+dtlpy-1.112.9.dist-info/top_level.txt,sha256=ZWuLmQGUOtWAdgTf4Fbx884w1o0vBYq9dEc1zLv9Mig,12
+dtlpy-1.112.9.dist-info/RECORD,,
tests/features/environment.py
CHANGED
@@ -294,10 +294,10 @@ def after_tag(context, tag):
         pass
     elif tag == 'wip':
         pass
-    elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED']):
+    elif any(i_tag in tag for i_tag in ['DAT-', 'qa-', 'rc_only', 'skip_test', 'ATP', 'AIRGAPPED', 'DM-cache']):
         pass
     else:
-        raise ValueError('
+        raise ValueError('Unknown tag: {}'.format(tag))


 @fixture
{dtlpy-1.111.11.data → dtlpy-1.112.9.data}/scripts/dlp
File without changes
{dtlpy-1.111.11.data → dtlpy-1.112.9.data}/scripts/dlp.bat
File without changes
{dtlpy-1.111.11.data → dtlpy-1.112.9.data}/scripts/dlp.py
File without changes
{dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/LICENSE
File without changes
{dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/WHEEL
File without changes
{dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/entry_points.txt
File without changes
{dtlpy-1.111.11.dist-info → dtlpy-1.112.9.dist-info}/top_level.txt
File without changes