ciocore 8.2.0b1__py2.py3-none-any.whl → 8.3.0__py2.py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- ciocore/VERSION +1 -1
- ciocore/conductor_submit.py +71 -50
- ciocore/docsite/apidoc/hardware_set/index.html +0 -8
- ciocore/docsite/search/search_index.json +1 -1
- ciocore/docsite/sitemap.xml.gz +0 -0
- ciocore/hardware_set.py +0 -8
- {ciocore-8.2.0b1.dist-info → ciocore-8.3.0.dist-info}/METADATA +3 -3
- {ciocore-8.2.0b1.dist-info → ciocore-8.3.0.dist-info}/RECORD +13 -13
- tests/test_hardware_set.py +19 -28
- tests/test_submit.py +40 -16
- {ciocore-8.2.0b1.dist-info → ciocore-8.3.0.dist-info}/WHEEL +0 -0
- {ciocore-8.2.0b1.dist-info → ciocore-8.3.0.dist-info}/entry_points.txt +0 -0
- {ciocore-8.2.0b1.dist-info → ciocore-8.3.0.dist-info}/top_level.txt +0 -0
ciocore/VERSION
CHANGED
@@ -1 +1 @@
-8.2.0b1
+8.3.0
ciocore/conductor_submit.py
CHANGED
@@ -17,6 +17,7 @@ logger = logging.getLogger(CONDUCTOR_LOGGER_NAME)
 
 FEATURE_DEV = int(os.environ.get("CIO_FEATURE_DEV", 0))
 
+
 class Submit(object):
     """Conductor Submission object."""
 
@@ -32,7 +33,7 @@ class Submit(object):
         self.enforced_md5s = args.get("enforced_md5s", {})
         self.database_filepath = args.get("database_filepath", "")
         self.api_client = api_client.ApiClient()
-
+
         self.progress_handler = None
         self.uploader_ = None
 
@@ -69,15 +70,25 @@ class Submit(object):
                 self.payload[arg] = args[arg]
             except KeyError:
                 if default is None:
-                    logger.error("Submit: You must provide the '{}' argument.".format(arg))
+                    logger.error(
+                        "Submit: You must provide the '{}' argument.".format(arg)
+                    )
                 raise
 
+        # If no upload paths are provided, make sure the backend does not expect a daemon to be running.
+        if not self.upload_paths:
+            self.payload["local_upload"] = True
+
         # HACK: Posix -> Windows submission - must windowize output_path. Only available for
         # developers. If a customer tries to submit from Mac to Windows, then they have access to
         # Windows instances by mistake. Yes this code could get them out of a bind, but it will
         # generate support tickets when they try to use the uploader daemon for example.
-        self.ensure_windows_drive_letters = FEATURE_DEV and self.payload["instance_type"].endswith("-w")
-        self.payload["output_path"] = self._ensure_windows_drive_letter(self.payload["output_path"])
+        self.ensure_windows_drive_letters = FEATURE_DEV and self.payload[
+            "instance_type"
+        ].endswith("-w")
+        self.payload["output_path"] = self._ensure_windows_drive_letter(
+            self.payload["output_path"]
+        )
 
         self.payload["notify"] = {"emails": self.payload["notify"]}
 
@@ -86,25 +97,25 @@ class Submit(object):
             logger.debug("{}:{}".format(arg, self.payload[arg]))
 
     def upload_progress_callback(self, upload_stats):
-        '''
+        """
         Call the progress handler
-
-        '''
+        """
+
         if self.progress_handler:
-            logger.debug("Sending progress update to {}".format(self.progress_handler))
+            logger.debug("Sending progress update to {}".format(self.progress_handler))
             self.progress_handler(upload_stats)
 
     def stop_work(self):
-        '''
+        """
         Cancel the submission process
-
-        '''
+        """
+
         logger.debug("Submitter was requested to stop work.")
 
         if self.uploader_:
-            logger.debug("Uploader set to cancel.")
-            self.uploader_.cancel=True
-
+            logger.debug("Uploader set to cancel.")
+            self.uploader_.cancel = True
+
     def main(self):
         """
         Submit the job
@@ -116,37 +127,42 @@ class Submit(object):
         2. local_upload=False: md5 calcs and uploads are performed on on any machine with access to
         the filesystem on which the files reside, and by the same paths as the submission machine.
         """
-
-        self._log_threads(message_template="{thread_count} threads before starting upload")
 
-        processed_filepaths = file_utils.process_upload_filepaths(self.upload_paths)
-        file_map = {path: None for path in processed_filepaths}
+        self._log_threads(
+            message_template="{thread_count} threads before starting upload"
+        )
 
-        if self.payload["local_upload"]:
-            file_map = self._handle_local_upload(file_map)
-
-        elif self.enforced_md5s:
-            file_map = self._enforce_md5s(file_map)
+        if self.upload_paths:
 
-        for path in file_map:
-            expanded = self._expand_stats(path, file_map[path])
-            self.payload["upload_files"].append(expanded)
-            self.payload["upload_size"] += expanded["st_size"]
+            processed_filepaths = file_utils.process_upload_filepaths(self.upload_paths)
+            file_map = {path: None for path in processed_filepaths}
 
+            if self.payload["local_upload"]:
+                file_map = self._handle_local_upload(file_map)
+
+            elif self.enforced_md5s:
+                file_map = self._enforce_md5s(file_map)
+
+            for path in file_map:
+                expanded = self._expand_stats(path, file_map[path])
+                self.payload["upload_files"].append(expanded)
+                self.payload["upload_size"] += expanded["st_size"]
 
         self._log_threads(message_template="{thread_count} threads after upload")
-
+
         logger.info("Sending Job...")
-
+
         response, response_code = self.api_client.make_request(
-            uri_path="jobs/",
-            data=json.dumps(self.payload),
-            raise_on_error=False,
-            use_api_key=True
+            uri_path="jobs/",
+            data=json.dumps(self.payload),
+            raise_on_error=False,
+            use_api_key=True,
         )
 
         if response_code not in [201, 204]:
-            raise Exception("Job Submission failed: Error %s ...\n%s" % (response_code, response))
+            raise Exception(
+                "Job Submission failed: Error %s ...\n%s" % (response_code, response)
+            )
 
         return json.loads(response), response_code
 
@@ -158,11 +174,11 @@ class Submit(object):
         """
         cfg = config.config().config
        api_client.read_conductor_credentials(use_api_key=True)
-
+
         # Don't use more threads than there are files
         thread_count = min(len(file_map), cfg["thread_count"])
         logger.info("Using {} threads for the uploader".format(thread_count))
-
+
         uploader_args = {
             "location": self.payload["location"],
             "database_filepath": self.database_filepath,
@@ -173,18 +189,25 @@ class Submit(object):
         self.uploader_ = uploader.Uploader(uploader_args)
 
         self.uploader_.progress_callback = self.upload_progress_callback
-
+
         self.uploader_.handle_upload_response(self.payload["project"], file_map)
-
+
         if self.uploader_.cancel:
-            raise exceptions.UserCanceledError("Job submission was cancelled by the user")
+            raise exceptions.UserCanceledError(
+                "Job submission was cancelled by the user"
+            )
 
         if self.uploader_.error_messages:
             error_message = ""
             for cnt, err in enumerate(self.uploader_.error_messages):
-                error_message += "Error {}:\n{}\n\n".format(
-                    cnt + 1, "".join(traceback.format_exception(*err)))
-            raise Exception("\n\nCould not upload files, encountered %s errors:\n\n%s" % (len(self.uploader_.error_messages), error_message))
+                error_message += "Error {}:\n{}\n\n".format(
+                    cnt + 1, "".join(traceback.format_exception(*err))
+                )
+            raise Exception(
+                "\n\nCould not upload files, encountered %s errors:\n\n%s"
+                % (len(self.uploader_.error_messages), error_message)
+            )
+
         # Get the resulting dictionary of the file's and their corresponding md5 hashes
         upload_md5s = self.uploader_.return_md5s()
         for path in upload_md5s:
@@ -199,13 +222,13 @@ class Submit(object):
 
         Returns {"path1': enforced_md5_1, path2: enforced_md5_2}
         """
-
+
         progress_title = "Processing MD5 of local files"
         file_count = len(self.enforced_md5s)
 
         for cnt, filepath in enumerate(self.enforced_md5s):
-            percentage_complete = float(cnt)/float(file_count)
-
+            percentage_complete = float(cnt) / float(file_count)
+
 
             md5 = self.enforced_md5s[filepath]
             logger.debug("filepath is %s" % filepath)
@@ -216,7 +239,6 @@ class Submit(object):
 
         return file_map
 
-
     def _expand_stats(self, file, md5):
         filestat = os.stat(file)
 
@@ -237,16 +259,15 @@ class Submit(object):
             "st_mtime": filestat.st_mtime,
             "st_ctime": filestat.st_ctime,
         }
-
+
     def _log_threads(self, message_template):
 
         threads = list(threading.enumerate())
-
+
         for t in threads:
             logger.debug(t)
 
-        logger.debug(message_template.format(thread_count=len(threads)))
-
+        logger.debug(message_template.format(thread_count=len(threads)))
 
     def _ensure_windows_drive_letter(self, filepath):
         """
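Note on the conductor_submit.py change above: the practical effect is that a submission with no upload paths now forces local_upload to True in the payload, so the backend never waits for an uploader daemon that has nothing to do, and main() only builds the upload file map when there are paths to process. A minimal sketch of that rule, written as a standalone helper rather than the real Submit class (the helper name and the example payloads are illustrative assumptions, not part of ciocore's API):

    # Hypothetical paraphrase of the rule added in 8.3.0; "payload" and
    # "upload_paths" mirror the names used in conductor_submit.py.
    def normalize_local_upload(payload, upload_paths):
        if not upload_paths:
            # Nothing to transfer: force local_upload so the backend does not
            # expect a separate uploader daemon to pick up the job.
            payload["local_upload"] = True
        return payload

    print(normalize_local_upload({"local_upload": False}, []))
    # {'local_upload': True}
    print(normalize_local_upload({"local_upload": False}, ["/path/to/scene.ma"]))
    # {'local_upload': False}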
ciocore/docsite/apidoc/hardware_set/index.html
CHANGED
@@ -1339,14 +1339,6 @@ HardwareSet </code>
 <span class="k">else</span> <span class="p">[{</span><span class="s2">"label"</span><span class="p">:</span> <span class="s2">"CPU"</span><span class="p">,</span> <span class="s2">"order"</span><span class="p">:</span> <span class="mi">1</span><span class="p">}]</span>
 <span class="p">)</span>
 <span class="n">result</span><span class="p">[</span><span class="n">it</span><span class="p">[</span><span class="s2">"name"</span><span class="p">]]</span> <span class="o">=</span> <span class="n">it</span>
-<span class="n">result</span><span class="p">[</span><span class="s2">"best_fit"</span><span class="p">]</span> <span class="o">=</span> <span class="p">{</span><span class="s2">"name"</span><span class="p">:</span><span class="s2">"automatic_instance_type"</span><span class="p">,</span>
-<span class="s2">"description"</span><span class="p">:</span> <span class="s2">"Conductor will choose."</span><span class="p">,</span>
-<span class="s2">"categories"</span><span class="p">:</span> <span class="p">[{</span><span class="s2">"label"</span><span class="p">:</span> <span class="s2">"auto"</span><span class="p">,</span> <span class="s2">"order"</span><span class="p">:</span> <span class="mi">0</span><span class="p">}],</span>
-<span class="s2">"gpu"</span><span class="p">:</span> <span class="kc">None</span><span class="p">,</span>
-<span class="s2">"cores"</span><span class="p">:</span> <span class="mi">0</span><span class="p">,</span>
-<span class="s2">"memory"</span><span class="p">:</span> <span class="mi">0</span>
-<span class="p">}</span>
-
 
 <span class="k">return</span> <span class="n">result</span>
 
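The docsite hunk above mirrors the source change in ciocore/hardware_set.py (+0 -8): HardwareSet no longer injects a synthetic "best_fit" entry named "automatic_instance_type" into the mapping it builds, so only real instance types are returned. A hedged caller-side sketch of coping with the missing key (the instance-type data below is invented for illustration and is not ciocore output):

    # Stand-in for the name-keyed mapping that HardwareSet builds; values invented.
    instance_types = {
        "n1-standard-8": {"name": "n1-standard-8", "cores": 8, "memory": 30, "gpu": None},
    }

    # Before 8.3.0 the rendered docs showed an extra synthetic entry:
    #   instance_types["best_fit"] = {"name": "automatic_instance_type", ...}
    # In 8.3.0 that entry is gone, so callers should not assume the key exists.
    chosen = instance_types.get("best_fit") or next(iter(instance_types.values()))
    print(chosen["name"])  # n1-standard-8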