poetry-plugin-ivcap 0.5.0__tar.gz → 0.5.1__tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: poetry-plugin-ivcap
-Version: 0.5.0
+Version: 0.5.1
 Summary: A custom Poetry command for IVCAP deployments
 License: MIT
 Author: Max Ott
@@ -158,13 +158,15 @@ def exec_job(data, args, is_silent, line):
     pa = p.parse_args(args)
     timeout = 0 if pa.stream else pa.timeout
     # Get access token using ivcap CLI
-    try:
-        token = subprocess.check_output(
-            ["ivcap", "--silent", "context", "get", "access-token", "--refresh-token"],
-            text=True
-        ).strip()
-    except Exception as e:
-        raise RuntimeError(f"Failed to get IVCAP access token: {e}")
+    token = pa.auth_token
+    if not token:
+        try:
+            token = subprocess.check_output(
+                ["ivcap", "--silent", "context", "get", "access-token", "--refresh-token"],
+                text=True
+            ).strip()
+        except Exception as e:
+            raise RuntimeError(f"Failed to get IVCAP access token: {e}")
 
     # Get IVCAP deployment URL
     try:
@@ -211,15 +213,14 @@ def exec_job(data, args, is_silent, line):
             location = f"{payload.get('location')}"
             job_id = payload.get("job-id")
             retry_later = payload.get("retry-later", 5)
-            if not is_silent:
-                line(f"<debug>Job '{job_id}' accepted, but no result yet. Polling in {retry_later} seconds.</debug>")
-            if pa.stream:
-                stream_result(location, token, pa)
-            else:
-                poll_for_result(location, retry_later, token, is_silent, line)
-
         except Exception as e:
             line(f"<error>Failed to handle 202 response: {e}</error>")
+
+        if pa.stream:
+            stream_result(location, job_id, token, pa, is_silent, line)
+        else:
+            poll_for_result(location, job_id, retry_later, token, is_silent, line)
+
     else:
         handle_response(response, line)
 
@@ -248,7 +249,9 @@ def handle_response(resp, line):
         line(f"<warning>Headers: {str(resp.headers)}</warning>")
         return "unknown"
 
-def poll_for_result(location, retry_later, token, is_silent, line):
+def poll_for_result(location, job_id, retry_later, token, is_silent, line):
+    if not is_silent:
+        line(f"<debug>Job '{job_id}' accepted, but no result yet. Polling in {retry_later} seconds.</debug>")
     while True:
         time.sleep(retry_later)
         poll_headers = {
@@ -273,35 +276,97 @@ def poll_for_result(location, retry_later, token, is_silent, line):
         else:
             break
 
-def stream_result(location, token, pa):
+CONNECT_TIMEOUT = 5
+READ_TIMEOUT = 120 # ensure server sends heartbeat within this window
+
+def stream_result(location, job_id, token, pa, is_silent, line):
     """
     Stream the result content from the given location using the provided token.
     """
+    if not is_silent:
+        line(f"<debug>Job '{job_id}' accepted, Waiting for events now.</debug>")
     headers = {
         "Authorization": f"Bearer {token}",
-        "Accept": "text/event-stream"
+        "Accept": "text/event-stream",
+        "Cache-Control": "no-cache",
+        "Connection": "keep-alive",
     }
-    with requests.get(location + "/events", stream=True, headers=headers, timeout=(5, 65)) as r:
-        r.raise_for_status()
-        for row in r.iter_lines(decode_unicode=True, chunk_size=1):
-            if row is None:
-                continue
-            if row.startswith(":"):
-                # comment/heartbeat
-                continue
-            print_sse_row(row, pa) # raw SSE lines (e.g., "data: {...}", "event: message")
-
-def print_sse_row(row, pa):
+    url = location + "/events"
+    session = requests.Session()
+    last_event_id = None
+    backoff = 1.0
+    while True:
+        try:
+            if last_event_id:
+                headers["Last-Event-ID"] = last_event_id
+            with session.get(url, stream=True, headers=headers, timeout=(CONNECT_TIMEOUT, READ_TIMEOUT)) as r:
+                r.raise_for_status()
+                # Reset backoff on successful connect
+                backoff = 1.0
+                for row in r.iter_lines(decode_unicode=True, chunk_size=1):
+                    if row is None:
+                        continue
+                    if row.startswith(":"):
+                        # comment/heartbeat
+                        continue
+                    if row.startswith("id:"):
+                        last_event_id = row[3:].strip()
+                        continue
+                    if print_sse_row(row, pa, line): # raw SSE lines (e.g., "data: {...}", "event: message")
+                        return # done
+
+        except (requests.exceptions.ChunkedEncodingError,
+                requests.exceptions.ConnectionError,
+                requests.exceptions.ReadTimeout) as e:
+            if not is_silent:
+                line(f"<debug>stream error: {e}; reconnecting in {backoff:.1f}s</debug>")
+            time.sleep(backoff)
+            backoff = min(backoff * 2, 30.0) # cap backoff
+            continue
+        except requests.HTTPError as e:
+            # Non-200 or similar; backoff and retry
+            if not is_silent:
+                line(f"<debug>http error: {e}; reconnecting in {backoff:.1f}s</debug>")
+            time.sleep(backoff)
+            backoff = min(backoff * 2, 60.0)
+        except Exception:
+            line(f"<error>Failed to fetch events: {type(e)} - {e}</error>")
+            break
+
+    # except requests.exceptions.ChunkedEncodingError as ce:
+    #     line(f"<error>Chunked encoding error: {ce}</error>")
+    # except Exception as e:
+    #     line(f"<error>Failed to fetch events: {type(e)} - {e}</error>")
+
+def print_sse_row(row, pa, line) -> bool:
     if pa.raw_events:
         print(row)
+        return False # continue streaming
     elif row.startswith("data: "):
         # JSON data
         print("----")
         try:
             data = json.loads(row[6:])
             print(json.dumps(data, indent=2, sort_keys=True))
+            return check_if_done(data)
         except json.JSONDecodeError as e:
-            print(f"Failed to decode JSON: {e}")
+            line(f"<error>Failed to decode JSON: {e}</error>")
+
+
+def check_if_done(data) -> bool:
+    # {
+    #   "Data": {
+    #     "job-urn": "urn:ivcap:job:3e466031-ec8b-44eb-aec6-dc2f8212bec3",
+    #     "status": "succeeded"
+    #   },
+    #   "Type": "ivcap.job.status"
+    # }
+    if "Type" in data and data["Type"] == "ivcap.job.status":
+        if "Data" in data and "status" in data["Data"]:
+            status = data["Data"]["status"]
+            if status in ["succeeded", "failed", "error"]:
+                return True
+    return False # continue streaming
 
 def exec_parser():
     p = argparse.ArgumentParser(prog="poetry ivcap job-exec request_file --")
@@ -310,6 +375,7 @@ def exec_parser():
                    help="include result content in the response")
     p.add_argument("--stream", action="store_true", help="stream the result content")
     p.add_argument("--raw-events", action="store_true", help="print raw SSE events")
+    p.add_argument("--auth-token", help="alternative auth token to use")
     return p
 
 def nonneg_int(s: str) -> int:
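
The new --auth-token option lets callers supply a token directly instead of having the plugin shell out to "ivcap context get access-token". A minimal invocation sketch (the request file name request.json and the IVCAP_TOKEN environment variable are hypothetical; the command form follows the parser's prog string above):

    poetry ivcap job-exec request.json -- --stream --auth-token "$IVCAP_TOKEN"
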
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "poetry-plugin-ivcap"
-version = "0.5.0"
+version = "0.5.1"
 description = "A custom Poetry command for IVCAP deployments"
 authors = ["Max Ott <max.ott@csiro.au>"]
 license = "MIT"