kleinkram 0.43.2.dev20250331124109__py3-none-any.whl → 0.58.0.dev20260110152317__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. kleinkram/api/client.py +6 -18
  2. kleinkram/api/deser.py +152 -1
  3. kleinkram/api/file_transfer.py +202 -101
  4. kleinkram/api/pagination.py +11 -2
  5. kleinkram/api/query.py +10 -10
  6. kleinkram/api/routes.py +192 -59
  7. kleinkram/auth.py +108 -7
  8. kleinkram/cli/_action.py +131 -0
  9. kleinkram/cli/_download.py +8 -19
  10. kleinkram/cli/_endpoint.py +2 -4
  11. kleinkram/cli/_file.py +6 -18
  12. kleinkram/cli/_file_validator.py +125 -0
  13. kleinkram/cli/_list.py +5 -15
  14. kleinkram/cli/_mission.py +24 -28
  15. kleinkram/cli/_project.py +10 -26
  16. kleinkram/cli/_run.py +220 -0
  17. kleinkram/cli/_upload.py +58 -26
  18. kleinkram/cli/_verify.py +59 -16
  19. kleinkram/cli/app.py +56 -17
  20. kleinkram/cli/error_handling.py +1 -3
  21. kleinkram/config.py +6 -21
  22. kleinkram/core.py +53 -43
  23. kleinkram/errors.py +12 -0
  24. kleinkram/models.py +51 -1
  25. kleinkram/printing.py +229 -18
  26. kleinkram/utils.py +10 -24
  27. kleinkram/wrappers.py +54 -30
  28. {kleinkram-0.43.2.dev20250331124109.dist-info → kleinkram-0.58.0.dev20260110152317.dist-info}/METADATA +6 -4
  29. kleinkram-0.58.0.dev20260110152317.dist-info/RECORD +53 -0
  30. {kleinkram-0.43.2.dev20250331124109.dist-info → kleinkram-0.58.0.dev20260110152317.dist-info}/WHEEL +1 -1
  31. {kleinkram-0.43.2.dev20250331124109.dist-info → kleinkram-0.58.0.dev20260110152317.dist-info}/top_level.txt +0 -1
  32. {testing → tests}/backend_fixtures.py +27 -3
  33. tests/conftest.py +1 -1
  34. tests/generate_test_data.py +314 -0
  35. tests/test_config.py +2 -6
  36. tests/test_core.py +11 -31
  37. tests/test_end_to_end.py +3 -5
  38. tests/test_fixtures.py +3 -5
  39. tests/test_printing.py +9 -11
  40. tests/test_utils.py +1 -3
  41. tests/test_wrappers.py +9 -27
  42. kleinkram-0.43.2.dev20250331124109.dist-info/RECORD +0 -50
  43. testing/__init__.py +0 -0
  44. {kleinkram-0.43.2.dev20250331124109.dist-info → kleinkram-0.58.0.dev20260110152317.dist-info}/entry_points.txt +0 -0
kleinkram/api/file_transfer.py CHANGED
@@ -8,6 +8,7 @@ from concurrent.futures import as_completed
 from enum import Enum
 from pathlib import Path
 from time import monotonic
+from time import sleep
 from typing import Dict
 from typing import NamedTuple
 from typing import Optional
@@ -17,6 +18,9 @@ from uuid import UUID
 import boto3.s3.transfer
 import botocore.config
 import httpx
+from rich.console import Console
+from tqdm import tqdm
+
 from kleinkram.api.client import AuthenticatedClient
 from kleinkram.config import get_config
 from kleinkram.errors import AccessDenied
@@ -27,21 +31,23 @@ from kleinkram.utils import format_bytes
 from kleinkram.utils import format_error
 from kleinkram.utils import format_traceback
 from kleinkram.utils import styled_string
-from rich.console import Console
-from tqdm import tqdm
 
 logger = logging.getLogger(__name__)
 
 UPLOAD_CREDS = "/files/temporaryAccess"
-UPLOAD_CONFIRM = "/queue/confirmUpload"
+UPLOAD_CONFIRM = "/files/upload/confirm"
 UPLOAD_CANCEL = "/files/cancelUpload"
 
 DOWNLOAD_CHUNK_SIZE = 1024 * 1024 * 16
 DOWNLOAD_URL = "/files/download"
 
+MAX_UPLOAD_RETRIES = 3
 S3_MAX_RETRIES = 60  # same as frontend
 S3_READ_TIMEOUT = 60 * 5  # 5 minutes
 
+RETRY_BACKOFF_BASE = 2  # exponential backoff base
+MAX_RETRIES = 5
+
 
 class UploadCredentials(NamedTuple):
     access_key: str
@@ -51,23 +57,20 @@ class UploadCredentials(NamedTuple):
     bucket: str
 
 
-def _confirm_file_upload(
-    client: AuthenticatedClient, file_id: UUID, file_hash: str
-) -> None:
+def _confirm_file_upload(client: AuthenticatedClient, file_id: UUID, file_hash: str) -> None:
     data = {
         "uuid": str(file_id),
         "md5": file_hash,
+        "source": "CLI",
     }
     resp = client.post(UPLOAD_CONFIRM, json=data)
     resp.raise_for_status()
 
 
-def _cancel_file_upload(
-    client: AuthenticatedClient, file_id: UUID, mission_id: UUID
-) -> None:
+def _cancel_file_upload(client: AuthenticatedClient, file_id: UUID, mission_id: UUID) -> None:
     data = {
-        "uuid": [str(file_id)],
-        "missionUUID": str(mission_id),
+        "uuids": [str(file_id)],
+        "missionUuid": str(mission_id),
     }
     resp = client.post(UPLOAD_CANCEL, json=data)
     resp.raise_for_status()
@@ -91,9 +94,16 @@ def _get_upload_creditials(
     dct = {
         "filenames": [internal_filename],
         "missionUUID": str(mission_id),
+        "source": "CLI",
    }
-    resp = client.post(UPLOAD_CREDS, json=dct)
-    resp.raise_for_status()
+    try:
+        resp = client.post(UPLOAD_CREDS, json=dct)
+        resp.raise_for_status()
+    except httpx.HTTPStatusError as e:
+        # 409 Conflict means file already exists
+        if e.response.status_code == 409:
+            return None
+        raise
 
     data = resp.json()["data"][0]
 
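The rewritten credential request treats an HTTP 409 Conflict as "this file already exists" and signals it by returning None rather than raising. A minimal, self-contained sketch of that pattern with httpx (the URL and payload below are placeholders, not the kleinkram API):

    import httpx
    from typing import Optional

    def post_or_none_on_conflict(url: str, payload: dict) -> Optional[dict]:
        """POST and return the JSON body, or None if the server reports 409 Conflict."""
        try:
            resp = httpx.post(url, json=payload)
            resp.raise_for_status()  # raises httpx.HTTPStatusError on 4xx/5xx
        except httpx.HTTPStatusError as e:
            if e.response.status_code == 409:  # resource already exists
                return None
            raise  # other HTTP errors propagate unchanged
        return resp.json()

Returning a sentinel instead of raising lets the caller treat "already uploaded" as a normal, non-error outcome.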
@@ -151,6 +161,33 @@ class UploadState(Enum):
     CANCELED = 3
 
 
+def _get_upload_credentials_with_retry(client, pbar, filename, mission_id, max_attempts=5):
+    """
+    Retrieves upload credentials with retry logic.
+
+    Args:
+        client: The client object used for retrieving credentials.
+        filename: The internal filename.
+        mission_id: The mission ID.
+        max_attempts: Maximum number of retry attempts.
+
+    Returns:
+        The upload credentials or None if retrieval fails after all attempts.
+    """
+    attempt = 0
+    while attempt < max_attempts:
+        creds = _get_upload_creditials(client, internal_filename=filename, mission_id=mission_id)
+        if creds is not None:
+            return creds
+
+        attempt += 1
+        if attempt < max_attempts:
+            delay = 2**attempt  # Exponential backoff (2, 4, 8, 16...)
+            sleep(delay)
+
+    return None
+
+
 # TODO: i dont want to handle errors at this level
 def upload_file(
     client: AuthenticatedClient,
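With the default max_attempts=5, the helper above waits 2**attempt seconds between tries, i.e. 2, 4, 8, and 16 seconds after the first four failed attempts, then gives up by returning None. The same retry shape, sketched generically (assumes the wrapped callable signals failure by returning None):

    from time import sleep
    from typing import Callable, Optional, TypeVar

    T = TypeVar("T")

    def retry_with_backoff(fn: Callable[[], Optional[T]], max_attempts: int = 5, base: int = 2) -> Optional[T]:
        """Call fn until it returns a non-None value, backing off exponentially."""
        for attempt in range(1, max_attempts + 1):
            result = fn()
            if result is not None:
                return result
            if attempt < max_attempts:
                sleep(base**attempt)  # 2, 4, 8, 16, ... seconds
        return None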
@@ -161,79 +198,119 @@ def upload_file(
     verbose: bool = False,
     s3_endpoint: Optional[str] = None,
 ) -> Tuple[UploadState, int]:
-    """\
+    """
     returns UploadState and bytes uploaded (0 if not uploaded)
+    Retries up to 3 times on failure.
     """
     if s3_endpoint is None:
         s3_endpoint = get_config().endpoint.s3
 
     total_size = path.stat().st_size
-    with tqdm(
-        total=total_size,
-        unit="B",
-        unit_scale=True,
-        desc=f"uploading {path}...",
-        leave=False,
-        disable=not verbose,
-    ) as pbar:
-        # get per file upload credentials
-        creds = _get_upload_creditials(
-            client, internal_filename=filename, mission_id=mission_id
-        )
-        if creds is None:
-            return UploadState.EXISTS, 0
+    for attempt in range(MAX_UPLOAD_RETRIES):
+        with tqdm(
+            total=total_size,
+            unit="B",
+            unit_scale=True,
+            desc=f"uploading {path}...",
+            leave=False,
+            disable=not verbose,
+        ) as pbar:
+
+            # get per file upload credentials
+            creds = _get_upload_credentials_with_retry(
+                client, pbar, filename, mission_id, max_attempts=5 if attempt > 0 else 1
+            )
+
+            if creds is None:
+                return UploadState.EXISTS, 0
 
-        try:
-            _s3_upload(path, endpoint=s3_endpoint, credentials=creds, pbar=pbar)
-        except Exception as e:
-            logger.error(format_traceback(e))
             try:
-                _cancel_file_upload(client, creds.file_id, mission_id)
-            except Exception as cancel_e:
-                logger.error(f"Failed to cancel upload for {creds.file_id}: {cancel_e}")
-            raise e from e
-        else:
-            _confirm_file_upload(client, creds.file_id, b64_md5(path))
-            return UploadState.UPLOADED, total_size
+                _s3_upload(path, endpoint=s3_endpoint, credentials=creds, pbar=pbar)
+            except Exception as e:
+                logger.error(format_traceback(e))
+                try:
+                    _cancel_file_upload(client, creds.file_id, mission_id)
+                except Exception as cancel_e:
+                    logger.error(f"Failed to cancel upload for {creds.file_id}: {cancel_e}")
+
+                if attempt < 2:  # Retry if not the last attempt
+                    pbar.update(0)
+                    logger.error(f"Retrying upload for {attempt + 1}")
+                    continue
+                else:
+                    logger.error(f"Cancelling upload for {attempt}")
+                    raise e from e
+
+            else:
+                _confirm_file_upload(client, creds.file_id, b64_md5(path))
+                return UploadState.UPLOADED, total_size
 
 
 def _get_file_download(client: AuthenticatedClient, id: UUID) -> str:
     """\
     get the download url for a file by file id
     """
-    resp = client.get(DOWNLOAD_URL, params={"uuid": str(id), "expires": True})
+    resp = client.get(DOWNLOAD_URL, params={"uuid": str(id), "expires": True, "preview_only": False})
 
     if 400 <= resp.status_code < 500:
         raise AccessDenied(
-            f"Failed to download file: {resp.json()['message']}"
-            f" Status Code: {resp.status_code}",
+            f"Failed to download file: {resp.json()['message']}" f" Status Code: {resp.status_code}",
         )
 
     resp.raise_for_status()
 
-    return resp.text
+    return resp.json()["url"]
 
 
-def _url_download(
-    url: str, *, path: Path, size: int, overwrite: bool = False, verbose: bool = False
-) -> None:
-    if path.exists() and not overwrite:
-        raise FileExistsError(f"file already exists: {path}")
-
-    with httpx.stream("GET", url) as response:
-        response.raise_for_status()
-        with open(path, "wb") as f:
-            with tqdm(
-                total=size,
-                desc=f"downloading {path.name}",
-                unit="B",
-                unit_scale=True,
-                leave=False,
-                disable=not verbose,
-            ) as pbar:
-                for chunk in response.iter_bytes(chunk_size=DOWNLOAD_CHUNK_SIZE):
-                    f.write(chunk)
-                    pbar.update(len(chunk))
+def _url_download(url: str, *, path: Path, size: int, overwrite: bool = False, verbose: bool = False) -> None:
+    if path.exists():
+        if overwrite:
+            path.unlink()
+            downloaded = 0
+        else:
+            downloaded = path.stat().st_size
+            if downloaded >= size:
+                raise FileExistsError(f"file already exists and is complete: {path}")
+    else:
+        downloaded = 0
+
+    attempt = 0
+    while downloaded < size:
+        try:
+            headers = {"Range": f"bytes={downloaded}-"}
+            with httpx.stream("GET", url, headers=headers, timeout=S3_READ_TIMEOUT) as response:
+                # Accept both 206 Partial Content and 200 OK if starting from 0
+                if not (response.status_code == 206 or (downloaded == 0 and response.status_code == 200)):
+                    response.raise_for_status()
+                    raise RuntimeError(f"Expected 206 Partial Content, got {response.status_code}")
+
+                mode = "ab" if downloaded > 0 else "wb"
+                with open(path, mode) as f:
+                    with tqdm(
+                        total=size,
+                        initial=downloaded,
+                        desc=f"downloading {path.name}",
+                        unit="B",
+                        unit_scale=True,
+                        leave=False,
+                        disable=not verbose,
+                    ) as pbar:
+                        for chunk in response.iter_bytes(chunk_size=DOWNLOAD_CHUNK_SIZE):
+                            attempt = 0  # reset attempt counter on successful download of non-empty chunk
+                            if not chunk:
+                                break
+                            f.write(chunk)
+                            downloaded += len(chunk)
+                            pbar.update(len(chunk))
+            break  # download complete
+        except Exception as e:
+            logger.info(f"Error: {e}, retrying...")
+            attempt += 1
+            if attempt > MAX_RETRIES:
+                raise RuntimeError(f"Download failed after {MAX_RETRIES} retries due to {e}") from e
+            if verbose:
+                print(f"{e} on attempt {attempt}/{MAX_RETRIES}, retrying after backoff...")
+            sleep(RETRY_BACKOFF_BASE**attempt)
 
 
 class DownloadState(Enum):
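The new _url_download resumes partial downloads instead of restarting them: it requests the missing byte range via a Range header, appends to the existing file, and backs off exponentially between retries, while a plain 200 on a resume is rejected so a full-body reply can never be appended to a partial file. A condensed sketch of the resume loop (no progress bar; constants inlined):

    import time
    from pathlib import Path

    import httpx

    def resumable_download(url: str, path: Path, size: int, max_retries: int = 5) -> None:
        """Fetch url into path, resuming from the current file size after failures."""
        downloaded = path.stat().st_size if path.exists() else 0
        attempt = 0
        while downloaded < size:
            try:
                headers = {"Range": f"bytes={downloaded}-"}
                with httpx.stream("GET", url, headers=headers) as resp:
                    # 206 means the server honored the range; a plain 200 is only
                    # acceptable when starting from byte 0
                    if not (resp.status_code == 206 or (downloaded == 0 and resp.status_code == 200)):
                        resp.raise_for_status()
                        raise RuntimeError(f"expected 206, got {resp.status_code}")
                    with open(path, "ab" if downloaded > 0 else "wb") as f:
                        for chunk in resp.iter_bytes():
                            f.write(chunk)
                            downloaded += len(chunk)
                break  # stream ended cleanly: download complete
            except Exception:
                attempt += 1
                if attempt > max_retries:
                    raise
                time.sleep(2**attempt)  # exponential backoff before resuming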
@@ -242,6 +319,7 @@ class DownloadState(Enum):
     DOWNLOADED_INVALID_HASH = 3
     SKIPPED_INVALID_HASH = 4
     SKIPPED_INVALID_REMOTE_STATE = 5
+    SKIPPED_FILE_SIZE_MISMATCH = 6
 
 
 def download_file(
@@ -260,18 +338,24 @@
         return DownloadState.SKIPPED_INVALID_REMOTE_STATE, 0
 
     if path.exists():
-        local_hash = b64_md5(path)
-        if local_hash != file.hash and not overwrite and file.hash is not None:
-            return DownloadState.SKIPPED_INVALID_HASH
 
-        elif local_hash == file.hash:
-            return DownloadState.SKIPPED_OK
+        # compare file size
+        if file.size == path.stat().st_size:
+            local_hash = b64_md5(path)
+            if local_hash != file.hash and not overwrite and file.hash is not None:
+                return DownloadState.SKIPPED_INVALID_HASH, 0
 
-        # this has to be here
-        if verbose:
-            tqdm.write(
-                styled_string(f"overwriting {path}, hash missmatch", style="yellow")
-            )
+            elif local_hash == file.hash:
+                return DownloadState.SKIPPED_OK, 0
+
+            elif verbose:
+                tqdm.write(styled_string(f"overwriting {path}, hash mismatch", style="yellow"))
+
+        elif not overwrite and file.size is not None:
+            return DownloadState.SKIPPED_FILE_SIZE_MISMATCH, 0
+
+        elif verbose:
+            tqdm.write(styled_string(f"overwriting {path}, file size mismatch", style="yellow"))
 
     # request a download url
     download_url = _get_file_download(client, file.id)
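Skip detection now stats the file before hashing it: a size comparison is an O(1) stat, while an MD5 pass reads the whole file, so size acts as a cheap pre-filter and the hash is only computed when the sizes already agree. The decision order sketched standalone (remote_size and remote_hash are illustrative parameters; the base64-of-MD5 encoding assumes kleinkram's b64_md5 convention):

    import base64
    import hashlib
    from pathlib import Path
    from typing import Optional

    def needs_download(path: Path, remote_size: int, remote_hash: Optional[str]) -> bool:
        """Cheap size check first; hash only when the sizes already match."""
        if not path.exists():
            return True
        if path.stat().st_size != remote_size:
            return True  # different size: contents cannot match
        if remote_hash is None:
            return False  # same size and no hash to compare against
        digest = hashlib.md5(path.read_bytes()).digest()  # fine for a sketch; stream large files
        return base64.b64encode(digest).decode() != remote_hash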
@@ -301,6 +385,10 @@ def download_file(
 
     observed_hash = b64_md5(path)
     if file.hash is not None and observed_hash != file.hash:
+        print(
+            f"HASH MISMATCH: {path} expected={file.hash} observed={observed_hash}",
+            file=sys.stderr,
+        )
         # Download completed but hash failed
         return (
             DownloadState.DOWNLOADED_INVALID_HASH,
@@ -317,9 +405,7 @@ UPLOAD_STATE_COLOR = {
 }
 
 
-def _upload_handler(
-    future: Future[Tuple[UploadState, int]], path: Path, *, verbose: bool = False
-) -> int:
+def _upload_handler(future: Future[Tuple[UploadState, int]], path: Path, *, verbose: bool = False) -> int:
     """Returns bytes uploaded successfully."""
     state = UploadState.CANCELED  # Default to canceled if exception occurs
     size_bytes = 0
@@ -328,7 +414,7 @@ def _upload_handler(
     except Exception as e:
         logger.error(format_traceback(e))
         if verbose:
-            tqdm.write(format_error(f"error uploading", e, verbose=verbose))
+            tqdm.write(format_error("error uploading", e, verbose=verbose))
         else:
             print(f"ERROR: {path.absolute()}: {e}", file=sys.stderr)
         return 0  # Return 0 bytes on error
@@ -353,6 +439,7 @@ DOWNLOAD_STATE_COLOR = {
     DownloadState.SKIPPED_OK: "green",
     DownloadState.DOWNLOADED_INVALID_HASH: "red",
     DownloadState.SKIPPED_INVALID_HASH: "yellow",
+    DownloadState.SKIPPED_FILE_SIZE_MISMATCH: "yellow",
     DownloadState.SKIPPED_INVALID_REMOTE_STATE: "purple",
 }
 
@@ -385,6 +472,8 @@ def _download_handler(
         msg = f"skipped {path} already downloaded (hash ok)"
     elif state == DownloadState.SKIPPED_INVALID_HASH:
         msg = f"skipped {path}, exists with hash mismatch (use --overwrite?)"
+    elif state == DownloadState.SKIPPED_FILE_SIZE_MISMATCH:
+        msg = f"skipped {path}, exists with file size mismatch (use --overwrite?)"
     elif state == DownloadState.SKIPPED_INVALID_REMOTE_STATE:
         msg = f"skipped {path}, remote file has invalid state ({file.state.value})"
     else:
@@ -395,11 +484,7 @@
     elif state not in (DownloadState.DOWNLOADED_OK, DownloadState.SKIPPED_OK):
         print(f"SKIP/FAIL: {path.absolute()} ({state.name})", file=sys.stderr)
 
-    return (
-        size_bytes
-        if state in (DownloadState.DOWNLOADED_OK, DownloadState.SKIPPED_OK)
-        else 0
-    )
+    return size_bytes if state in (DownloadState.DOWNLOADED_OK, DownloadState.SKIPPED_OK) else 0
 
 
 def upload_files(
@@ -416,16 +501,17 @@ def upload_files(
         unit="files",
         desc="Uploading files",
         disable=not verbose,
-        leave=False,
+        leave=True,
     ) as pbar:
         start = monotonic()
         futures: Dict[Future[Tuple[UploadState, int]], Path] = {}
+
+        skipped_files = 0
+        failed_files = 0
         with ThreadPoolExecutor(max_workers=n_workers) as executor:
             for name, path in files.items():
                 if not path.is_file():
-                    console.print(
-                        f"[yellow]Skipping non-existent file: {path}[/yellow]"
-                    )
+                    console.print(f"[yellow]Skipping non-existent file: {path}[/yellow]")
                     pbar.update()
                     continue
 
@@ -441,20 +527,38 @@
 
         total_uploaded_bytes = 0
         for future in as_completed(futures):
+
+            if future.exception():
+                failed_files += 1
+
+            if future.exception() is None and future.result()[0] == UploadState.EXISTS:
+                skipped_files += 1
+
             path = futures[future]
             uploaded_bytes = _upload_handler(future, path, verbose=verbose)
             total_uploaded_bytes += uploaded_bytes
             pbar.update()
 
-        end = monotonic()
-        elapsed_time = end - start
+    end = monotonic()
+    elapsed_time = end - start
 
-        avg_speed_bps = total_uploaded_bytes / elapsed_time if elapsed_time > 0 else 0
+    avg_speed_bps = total_uploaded_bytes / elapsed_time if elapsed_time > 0 else 0
 
+    if verbose:
+        console.print()
         console.print(f"Upload took {elapsed_time:.2f} seconds")
         console.print(f"Total uploaded: {format_bytes(total_uploaded_bytes)}")
         console.print(f"Average speed: {format_bytes(avg_speed_bps, speed=True)}")
 
+    if failed_files > 0:
+        console.print(
+            f"\nUploaded {len(files) - failed_files - skipped_files} files, "
+            f"{skipped_files} skipped, {failed_files} uploads failed",
+            style="red",
+        )
+    else:
+        console.print(f"\nUploaded {len(files) - skipped_files} files, {skipped_files} skipped")
+
 
 def download_files(
     client: AuthenticatedClient,
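upload_files now tallies failures via Future.exception() and "already exists" skips via the future's result before handing each future to _upload_handler; as_completed yields futures in completion order, so the counters stay accurate regardless of scheduling. The counting pattern in isolation (the "exists" sentinel is illustrative):

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def run_tasks(tasks):
        """Run callables concurrently; count failures and 'already exists' skips."""
        failed = skipped = 0
        with ThreadPoolExecutor(max_workers=4) as pool:
            futures = {pool.submit(fn): name for name, fn in tasks.items()}
            for future in as_completed(futures):
                if future.exception() is not None:
                    failed += 1  # exception() is non-None only when the task raised
                elif future.result() == "exists":
                    skipped += 1  # sentinel marks a task that was skipped
        return failed, skipped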
@@ -470,7 +574,7 @@ def download_files(
         unit="files",
         desc="Downloading files",
         disable=not verbose,
-        leave=False,
+        leave=True,
     ) as pbar:
 
         start = monotonic()
@@ -490,18 +594,15 @@
         total_downloaded_bytes = 0
         for future in as_completed(futures):
             file, path = futures[future]
-            downloaded_bytes = _download_handler(
-                future, file, path, verbose=verbose
-            )
+            downloaded_bytes = _download_handler(future, file, path, verbose=verbose)
             total_downloaded_bytes += downloaded_bytes
             pbar.update()
 
-        end = monotonic()
-        elapsed_time = end - start
-        avg_speed_bps = total_downloaded_bytes / elapsed_time if elapsed_time > 0 else 0
+    end = monotonic()
+    elapsed_time = end - start
+    avg_speed_bps = total_downloaded_bytes / elapsed_time if elapsed_time > 0 else 0
 
-        console.print(f"Download took {elapsed_time:.2f} seconds")
-        console.print(
-            f"Total downloaded/verified: {format_bytes(total_downloaded_bytes)}"
-        )
-        console.print(f"Average speed: {format_bytes(avg_speed_bps, speed=True)}")
+    console.print()
+    console.print(f"Download took {elapsed_time:.2f} seconds")
+    console.print(f"Total downloaded/verified: {format_bytes(total_downloaded_bytes)}")
+    console.print(f"Average speed: {format_bytes(avg_speed_bps, speed=True)}")
kleinkram/api/pagination.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-from enum import Enum
 from typing import Any
 from typing import Dict
 from typing import Generator
@@ -17,6 +16,7 @@ DataPage = Dict[str, Any]
 PAGE_SIZE = 128
 SKIP = "skip"
 TAKE = "take"
+EXACT_MATCH = "exactMatch"
 
 
 def paginated_request(
@@ -25,6 +25,7 @@ def paginated_request(
     params: Optional[Mapping[str, Any]] = None,
     max_entries: Optional[int] = None,
     page_size: int = PAGE_SIZE,
+    exact_match: bool = False,
 ) -> Generator[DataPage, None, None]:
     total_entries_count = 0
 
@@ -32,10 +33,18 @@
 
     params[TAKE] = page_size
     params[SKIP] = 0
+    if exact_match:
+        params[EXACT_MATCH] = str(exact_match).lower()  # pass string rather than bool
 
     while True:
         resp = client.get(endpoint, params=params)
-        resp.raise_for_status()  # TODO: this is fine for now
+
+        # explicitly handle 404 if json contains message
+        if resp.status_code == 404 and "message" in resp.json():
+            raise ValueError(resp.json()["message"])
+
+        # raise for other errors
+        resp.raise_for_status()
 
         paged_data = resp.json()
         data_page = cast(List[DataPage], paged_data["data"])
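Two behavior changes land here: the exactMatch query parameter is serialized as the string "true"/"false" rather than a Python bool, and a 404 whose JSON body carries a message is surfaced as a ValueError with that message instead of a bare HTTP error. A hypothetical caller draining the generator might look like this (assuming, as the data unpacking above suggests, that it yields one dict per entry):

    def find_exact(client, endpoint: str, name: str):
        """Return the first entry whose name matches exactly, or None."""
        pages = paginated_request(
            client,
            endpoint,
            params={"name": name},
            max_entries=1,
            exact_match=True,  # serialized as the string "true" on the wire
        )
        for entry in pages:
            return entry
        return None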
kleinkram/api/query.py CHANGED
@@ -41,6 +41,14 @@ class FileQuery:
     mission_query: MissionQuery = field(default_factory=MissionQuery)
 
 
+@dataclass
+class RunQuery:
+    mission_ids: List[UUID] = field(default_factory=list)
+    mission_patterns: List[str] = field(default_factory=list)
+    project_ids: List[UUID] = field(default_factory=list)
+    project_patterns: List[str] = field(default_factory=list)
+
+
 def check_mission_query_is_creatable(query: MissionQuery) -> str:
     """\
     check if a query is unique and can be used to create a mission
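RunQuery follows the same ID-or-pattern shape as the existing MissionQuery and FileQuery dataclasses, with independent mission and project selectors. A hypothetical instantiation, purely to show the fields:

    from uuid import UUID

    # Illustrative values only; RunQuery is the dataclass added above.
    query = RunQuery(
        project_patterns=["my-project*"],  # glob-style pattern
        mission_ids=[UUID("12345678-1234-5678-1234-567812345678")],
    )
    assert query.project_ids == [] and query.mission_patterns == []  # unset fields default to empty lists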
@@ -87,11 +95,7 @@ def mission_query_is_unique(query: MissionQuery) -> bool:
         return True
 
     # a single mission name a unique project spec are specified
-    if (
-        project_query_is_unique(query.project_query)
-        and len(query.patterns) == 1
-        and _pattern_is_unique(query.patterns[0])
-    ):
+    if project_query_is_unique(query.project_query) and len(query.patterns) == 1 and _pattern_is_unique(query.patterns[0]):
         return True
     return False
 
@@ -102,10 +106,6 @@ def file_query_is_unique(query: FileQuery) -> bool:
         return True
 
     # a single file name a unique mission spec are specified
-    if (
-        mission_query_is_unique(query.mission_query)
-        and len(query.patterns) == 1
-        and _pattern_is_unique(query.patterns[0])
-    ):
+    if mission_query_is_unique(query.mission_query) and len(query.patterns) == 1 and _pattern_is_unique(query.patterns[0]):
         return True
     return False