ciocore 8.2.0b2__py2.py3-none-any.whl → 8.3.0b1__py2.py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of ciocore might be problematic. See the advisory on the package's registry page for more details.

ciocore/VERSION CHANGED
@@ -1 +1 @@
1
- 8.2.0-beta.2
1
+ 8.3.0-beta.1
ciocore/api_client.py CHANGED
@@ -500,7 +500,7 @@ def request_instance_types(as_dict=False):
500
500
  """
501
501
  api = ApiClient()
502
502
  response, response_code = api.make_request(
503
- "api/v2/instance-types", use_api_key=True, raise_on_error=False
503
+ "api/v1/instance-types", use_api_key=True, raise_on_error=False
504
504
  )
505
505
  if response_code not in (200,):
506
506
  msg = "Failed to get instance types"
@@ -17,6 +17,7 @@ logger = logging.getLogger(CONDUCTOR_LOGGER_NAME)
17
17
 
18
18
  FEATURE_DEV = int(os.environ.get("CIO_FEATURE_DEV", 0))
19
19
 
20
+
20
21
  class Submit(object):
21
22
  """Conductor Submission object."""
22
23
 
@@ -32,7 +33,7 @@ class Submit(object):
32
33
  self.enforced_md5s = args.get("enforced_md5s", {})
33
34
  self.database_filepath = args.get("database_filepath", "")
34
35
  self.api_client = api_client.ApiClient()
35
-
36
+
36
37
  self.progress_handler = None
37
38
  self.uploader_ = None
38
39
 
@@ -69,15 +70,25 @@ class Submit(object):
69
70
  self.payload[arg] = args[arg]
70
71
  except KeyError:
71
72
  if default is None:
72
- logger.error("Submit: You must provide the '{}' argument.".format(arg))
73
+ logger.error(
74
+ "Submit: You must provide the '{}' argument.".format(arg)
75
+ )
73
76
  raise
74
77
 
78
+ # If no upload paths are provided, make sure the backend does not expect a daemon to be running.
79
+ if not self.upload_paths:
80
+ self.payload["local_upload"] = True
81
+
75
82
  # HACK: Posix -> Windows submission - must windowize output_path. Only available for
76
83
  # developers. If a customer tries to submit from Mac to Windows, then they have access to
77
84
  # Windows instances by mistake. Yes this code could get them out of a bind, but it will
78
85
  # generate support tickets when they try to use the uploader daemon for example.
79
- self.ensure_windows_drive_letters = FEATURE_DEV and self.payload["instance_type"].endswith("-w")
80
- self.payload["output_path"] = self._ensure_windows_drive_letter(self.payload["output_path"])
86
+ self.ensure_windows_drive_letters = FEATURE_DEV and self.payload[
87
+ "instance_type"
88
+ ].endswith("-w")
89
+ self.payload["output_path"] = self._ensure_windows_drive_letter(
90
+ self.payload["output_path"]
91
+ )
81
92
 
82
93
  self.payload["notify"] = {"emails": self.payload["notify"]}
83
94
 
@@ -86,25 +97,25 @@ class Submit(object):
86
97
  logger.debug("{}:{}".format(arg, self.payload[arg]))
87
98
 
88
99
  def upload_progress_callback(self, upload_stats):
89
- '''
100
+ """
90
101
  Call the progress handler
91
- '''
92
-
102
+ """
103
+
93
104
  if self.progress_handler:
94
- logger.debug("Sending progress update to {}".format(self.progress_handler))
105
+ logger.debug("Sending progress update to {}".format(self.progress_handler))
95
106
  self.progress_handler(upload_stats)
96
107
 
97
108
  def stop_work(self):
98
- '''
109
+ """
99
110
  Cancel the submission process
100
- '''
101
-
111
+ """
112
+
102
113
  logger.debug("Submitter was requested to stop work.")
103
114
 
104
115
  if self.uploader_:
105
- logger.debug("Uploader set to cancel.")
106
- self.uploader_.cancel=True
107
-
116
+ logger.debug("Uploader set to cancel.")
117
+ self.uploader_.cancel = True
118
+
108
119
  def main(self):
109
120
  """
110
121
  Submit the job
@@ -116,37 +127,42 @@ class Submit(object):
116
127
  2. local_upload=False: md5 calcs and uploads are performed on on any machine with access to
117
128
  the filesystem on which the files reside, and by the same paths as the submission machine.
118
129
  """
119
-
120
- self._log_threads(message_template="{thread_count} threads before starting upload")
121
130
 
122
- processed_filepaths = file_utils.process_upload_filepaths(self.upload_paths)
123
- file_map = {path: None for path in processed_filepaths}
131
+ self._log_threads(
132
+ message_template="{thread_count} threads before starting upload"
133
+ )
124
134
 
125
- if self.payload["local_upload"]:
126
- file_map = self._handle_local_upload(file_map)
127
-
128
- elif self.enforced_md5s:
129
- file_map = self._enforce_md5s(file_map)
135
+ if self.upload_paths:
130
136
 
131
- for path in file_map:
132
- expanded = self._expand_stats(path, file_map[path])
133
- self.payload["upload_files"].append(expanded)
134
- self.payload["upload_size"] += expanded["st_size"]
137
+ processed_filepaths = file_utils.process_upload_filepaths(self.upload_paths)
138
+ file_map = {path: None for path in processed_filepaths}
135
139
 
140
+ if self.payload["local_upload"]:
141
+ file_map = self._handle_local_upload(file_map)
142
+
143
+ elif self.enforced_md5s:
144
+ file_map = self._enforce_md5s(file_map)
145
+
146
+ for path in file_map:
147
+ expanded = self._expand_stats(path, file_map[path])
148
+ self.payload["upload_files"].append(expanded)
149
+ self.payload["upload_size"] += expanded["st_size"]
136
150
 
137
151
  self._log_threads(message_template="{thread_count} threads after upload")
138
-
152
+
139
153
  logger.info("Sending Job...")
140
-
154
+
141
155
  response, response_code = self.api_client.make_request(
142
- uri_path="jobs/",
143
- data=json.dumps(self.payload),
144
- raise_on_error=False,
145
- use_api_key=True
156
+ uri_path="jobs/",
157
+ data=json.dumps(self.payload),
158
+ raise_on_error=False,
159
+ use_api_key=True,
146
160
  )
147
161
 
148
162
  if response_code not in [201, 204]:
149
- raise Exception("Job Submission failed: Error %s ...\n%s" % (response_code, response))
163
+ raise Exception(
164
+ "Job Submission failed: Error %s ...\n%s" % (response_code, response)
165
+ )
150
166
 
151
167
  return json.loads(response), response_code
152
168
 
@@ -158,11 +174,11 @@ class Submit(object):
158
174
  """
159
175
  cfg = config.config().config
160
176
  api_client.read_conductor_credentials(use_api_key=True)
161
-
177
+
162
178
  # Don't use more threads than there are files
163
179
  thread_count = min(len(file_map), cfg["thread_count"])
164
180
  logger.info("Using {} threads for the uploader".format(thread_count))
165
-
181
+
166
182
  uploader_args = {
167
183
  "location": self.payload["location"],
168
184
  "database_filepath": self.database_filepath,
@@ -173,18 +189,25 @@ class Submit(object):
173
189
  self.uploader_ = uploader.Uploader(uploader_args)
174
190
 
175
191
  self.uploader_.progress_callback = self.upload_progress_callback
176
-
192
+
177
193
  self.uploader_.handle_upload_response(self.payload["project"], file_map)
178
-
194
+
179
195
  if self.uploader_.cancel:
180
- raise exceptions.UserCanceledError("Job submission was cancelled by the user")
196
+ raise exceptions.UserCanceledError(
197
+ "Job submission was cancelled by the user"
198
+ )
181
199
 
182
200
  if self.uploader_.error_messages:
183
201
  error_message = ""
184
202
  for cnt, err in enumerate(self.uploader_.error_messages):
185
- error_message += "Error {}:\n{}\n\n".format(cnt+1, "".join(traceback.format_exception(*err)))
186
- raise Exception("\n\nCould not upload files, encountered %s errors:\n\n%s" % (len(self.uploader_.error_messages), error_message))
187
-
203
+ error_message += "Error {}:\n{}\n\n".format(
204
+ cnt + 1, "".join(traceback.format_exception(*err))
205
+ )
206
+ raise Exception(
207
+ "\n\nCould not upload files, encountered %s errors:\n\n%s"
208
+ % (len(self.uploader_.error_messages), error_message)
209
+ )
210
+
188
211
  # Get the resulting dictionary of the file's and their corresponding md5 hashes
189
212
  upload_md5s = self.uploader_.return_md5s()
190
213
  for path in upload_md5s:
@@ -199,13 +222,13 @@ class Submit(object):
199
222
 
200
223
  Returns {"path1': enforced_md5_1, path2: enforced_md5_2}
201
224
  """
202
-
225
+
203
226
  progress_title = "Processing MD5 of local files"
204
227
  file_count = len(self.enforced_md5s)
205
228
 
206
229
  for cnt, filepath in enumerate(self.enforced_md5s):
207
- percentage_complete = float(cnt)/float(file_count)
208
-
230
+ percentage_complete = float(cnt) / float(file_count)
231
+
209
232
  md5 = self.enforced_md5s[filepath]
210
233
  logger.debug("filepath is %s" % filepath)
211
234
  processed_filepaths = file_utils.process_upload_filepath(filepath)
@@ -216,7 +239,6 @@ class Submit(object):
216
239
 
217
240
  return file_map
218
241
 
219
-
220
242
  def _expand_stats(self, file, md5):
221
243
  filestat = os.stat(file)
222
244
 
@@ -237,16 +259,15 @@ class Submit(object):
237
259
  "st_mtime": filestat.st_mtime,
238
260
  "st_ctime": filestat.st_ctime,
239
261
  }
240
-
262
+
241
263
  def _log_threads(self, message_template):
242
264
 
243
265
  threads = list(threading.enumerate())
244
-
266
+
245
267
  for t in threads:
246
268
  logger.debug(t)
247
269
 
248
- logger.debug(message_template.format(thread_count=len(threads)))
249
-
270
+ logger.debug(message_template.format(thread_count=len(threads)))
250
271
 
251
272
  def _ensure_windows_drive_letter(self, filepath):
252
273
  """
@@ -2450,7 +2450,7 @@ prompt the user to log in. </p>
2450
2450
  <span class="sd"> &quot;&quot;&quot;</span>
2451
2451
  <span class="n">api</span> <span class="o">=</span> <span class="n">ApiClient</span><span class="p">()</span>
2452
2452
  <span class="n">response</span><span class="p">,</span> <span class="n">response_code</span> <span class="o">=</span> <span class="n">api</span><span class="o">.</span><span class="n">make_request</span><span class="p">(</span>
2453
- <span class="s2">&quot;api/v2/instance-types&quot;</span><span class="p">,</span> <span class="n">use_api_key</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">raise_on_error</span><span class="o">=</span><span class="kc">False</span>
2453
+ <span class="s2">&quot;api/v1/instance-types&quot;</span><span class="p">,</span> <span class="n">use_api_key</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">raise_on_error</span><span class="o">=</span><span class="kc">False</span>
2454
2454
  <span class="p">)</span>
2455
2455
  <span class="k">if</span> <span class="n">response_code</span> <span class="ow">not</span> <span class="ow">in</span> <span class="p">(</span><span class="mi">200</span><span class="p">,):</span>
2456
2456
  <span class="n">msg</span> <span class="o">=</span> <span class="s2">&quot;Failed to get instance types&quot;</span>
@@ -1366,21 +1366,19 @@ HardwareSet </code>
1366
1366
  <span class="k">if</span> <span class="n">dual_platforms</span><span class="p">:</span>
1367
1367
  <span class="k">for</span> <span class="n">it</span> <span class="ow">in</span> <span class="n">instance_types</span><span class="p">:</span>
1368
1368
  <span class="n">flat_dict</span> <span class="o">=</span> <span class="n">flatten_dict</span><span class="p">(</span><span class="n">it</span><span class="p">)</span>
1369
- <span class="n">template_key</span> <span class="o">=</span> <span class="s2">&quot;cpu&quot;</span>
1370
- <span class="k">if</span> <span class="n">it</span><span class="p">[</span><span class="s2">&quot;name&quot;</span><span class="p">]</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s2">&quot;best-fit&quot;</span><span class="p">):</span>
1371
- <span class="n">template_key</span> <span class="o">=</span> <span class="s2">&quot;best-fit&quot;</span>
1372
- <span class="k">elif</span> <span class="s2">&quot;gpu_count&quot;</span> <span class="ow">in</span> <span class="n">flat_dict</span><span class="p">:</span>
1373
- <span class="n">template_key</span> <span class="o">=</span> <span class="s2">&quot;gpu&quot;</span>
1374
- <span class="n">it</span><span class="p">[</span><span class="s2">&quot;description&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">DESCRIPTION_TEMPLATE_OS</span><span class="p">[</span><span class="n">template_key</span><span class="p">]</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="n">flat_dict</span><span class="p">)</span>
1369
+ <span class="n">is_gpu</span> <span class="o">=</span> <span class="s2">&quot;gpu_count&quot;</span> <span class="ow">in</span> <span class="n">flat_dict</span>
1370
+ <span class="k">if</span> <span class="n">is_gpu</span><span class="p">:</span>
1371
+ <span class="n">it</span><span class="p">[</span><span class="s2">&quot;description&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">DESCRIPTION_TEMPLATE_OS</span><span class="p">[</span><span class="s2">&quot;gpu&quot;</span><span class="p">]</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="n">flat_dict</span><span class="p">)</span>
1372
+ <span class="k">else</span><span class="p">:</span>
1373
+ <span class="n">it</span><span class="p">[</span><span class="s2">&quot;description&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">DESCRIPTION_TEMPLATE_OS</span><span class="p">[</span><span class="s2">&quot;cpu&quot;</span><span class="p">]</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="n">flat_dict</span><span class="p">)</span>
1375
1374
  <span class="k">else</span><span class="p">:</span>
1376
1375
  <span class="k">for</span> <span class="n">it</span> <span class="ow">in</span> <span class="n">instance_types</span><span class="p">:</span>
1377
1376
  <span class="n">flat_dict</span> <span class="o">=</span> <span class="n">flatten_dict</span><span class="p">(</span><span class="n">it</span><span class="p">)</span>
1378
- <span class="n">template_key</span> <span class="o">=</span> <span class="s2">&quot;cpu&quot;</span>
1379
- <span class="k">if</span> <span class="n">it</span><span class="p">[</span><span class="s2">&quot;name&quot;</span><span class="p">]</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s2">&quot;best-fit&quot;</span><span class="p">):</span>
1380
- <span class="n">template_key</span> <span class="o">=</span> <span class="s2">&quot;best-fit&quot;</span>
1381
- <span class="k">elif</span> <span class="s2">&quot;gpu_count&quot;</span> <span class="ow">in</span> <span class="n">flat_dict</span><span class="p">:</span>
1382
- <span class="n">template_key</span> <span class="o">=</span> <span class="s2">&quot;gpu&quot;</span>
1383
- <span class="n">it</span><span class="p">[</span><span class="s2">&quot;description&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">DESCRIPTION_TEMPLATE</span><span class="p">[</span><span class="n">template_key</span><span class="p">]</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="n">flat_dict</span><span class="p">)</span>
1377
+ <span class="n">is_gpu</span> <span class="o">=</span> <span class="s2">&quot;gpu_count&quot;</span> <span class="ow">in</span> <span class="n">flat_dict</span>
1378
+ <span class="k">if</span> <span class="n">is_gpu</span><span class="p">:</span>
1379
+ <span class="n">it</span><span class="p">[</span><span class="s2">&quot;description&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">DESCRIPTION_TEMPLATE</span><span class="p">[</span><span class="s2">&quot;gpu&quot;</span><span class="p">]</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="n">flat_dict</span><span class="p">)</span>
1380
+ <span class="k">else</span><span class="p">:</span>
1381
+ <span class="n">it</span><span class="p">[</span><span class="s2">&quot;description&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">DESCRIPTION_TEMPLATE</span><span class="p">[</span><span class="s2">&quot;cpu&quot;</span><span class="p">]</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="n">flat_dict</span><span class="p">)</span>
1384
1382
 
1385
1383
  <span class="k">return</span> <span class="n">instance_types</span>
1386
1384