nebu 0.1.88__py3-none-any.whl → 0.1.93__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nebu/data.py CHANGED
@@ -7,6 +7,8 @@ from urllib.parse import urlparse
 import boto3
 from botocore.exceptions import ClientError
 
+from nebu.logging import logger
+
 
 def rclone_copy(
     source_dir: str,
@@ -45,7 +47,7 @@ def rclone_copy(
     command.extend(extra_args)
 
     if verbose:
-        print("Running command:", " ".join(command))
+        logger.info(f"Running command: {' '.join(command)}")
 
     try:
         process = subprocess.Popen(
@@ -57,12 +59,12 @@ def rclone_copy(
 
         for line in process.stdout:
            if verbose:
-                print(line.strip())
+                logger.debug(line.strip())
 
        return process.wait() == 0
 
    except Exception as e:
-        print(f"Error during rclone copy: {e}")
+        logger.error(f"Error during rclone copy: {e}")
        return False
 
 
@@ -83,7 +85,7 @@ def find_latest_checkpoint(training_dir: str) -> Optional[str]:
     latest_checkpoint_dir = None
 
     if not os.path.isdir(training_dir):
-        print(f"Error: Directory not found: {training_dir}")
+        logger.error(f"Error: Directory not found: {training_dir}")
         return None
 
     for item in os.listdir(training_dir):
@@ -134,7 +136,9 @@ class Bucket:
         """
         if aws_access_key_id and aws_secret_access_key:
             if verbose:
-                print("Initializing S3 client with provided temporary credentials.")
+                logger.info(
+                    "Initializing S3 client with provided temporary credentials."
+                )
             self.client = boto3.client(
                 "s3",
                 aws_access_key_id=aws_access_key_id,
@@ -143,7 +147,7 @@ class Bucket:
             )
         else:
             if verbose:
-                print("Initializing S3 client with default credentials.")
+                logger.info("Initializing S3 client with default credentials.")
             self.client = boto3.client("s3")
         self.verbose = verbose
 
@@ -160,7 +164,7 @@ class Bucket:
         paginator = self.client.get_paginator("list_objects_v2")
         list_prefix = prefix or ""
         if self.verbose:
-            print(f"Listing objects in s3://{bucket}/{list_prefix}...")
+            logger.info(f"Listing objects in s3://{bucket}/{list_prefix}...")
 
         operation_parameters = {"Bucket": bucket}
         if list_prefix:
@@ -203,21 +207,21 @@ class Bucket:
         except ClientError as e:
             if e.response["Error"]["Code"] == "NoSuchBucket":
                 if self.verbose:
-                    print(f"Error: Bucket '{bucket}' not found.")
+                    logger.error(f"Error: Bucket '{bucket}' not found.")
             elif e.response["Error"]["Code"] == "NoSuchKey" and prefix:
                 if self.verbose:
-                    print(
+                    logger.warning(
                         f"Prefix s3://{bucket}/{prefix} not found (treating as empty)."
                     )
             else:
-                print(f"Error listing S3 objects: {e}")
+                logger.error(f"Error listing S3 objects: {e}")
             if e.response["Error"]["Code"] == "NoSuchBucket":
                 return {}
         except Exception as e:
-            print(f"An unexpected error occurred listing S3 objects: {e}")
+            logger.error(f"An unexpected error occurred listing S3 objects: {e}")
             return {}
         if self.verbose:
-            print(f"Found {len(objects)} objects in S3.")
+            logger.info(f"Found {len(objects)} objects in S3.")
         return objects
 
     def _list_local(self, local_dir: str) -> Dict[str, Dict[str, Any]]:
@@ -225,13 +229,13 @@ class Bucket:
         files: Dict[str, Dict[str, Any]] = {}
         if not os.path.exists(local_dir):
             if self.verbose:
-                print(
+                logger.warning(
                     f"Warning: Local path not found: {local_dir} (treating as empty)."
                 )
             return files
         if os.path.isfile(local_dir):
             if self.verbose:
-                print(
+                logger.warning(
                     f"Warning: Source {local_dir} is a file, not a directory. Syncing single file."
                 )
             try:
@@ -245,10 +249,10 @@ class Bucket:
                     "type": "local",
                 }
             except OSError as e:
-                print(f"Error accessing source file {local_dir}: {e}")
+                logger.error(f"Error accessing source file {local_dir}: {e}")
             return files
         if self.verbose:
-            print(f"Scanning local directory: {local_dir}...")
+            logger.info(f"Scanning local directory: {local_dir}...")
         for root, _, file_list in os.walk(local_dir):
             for file_name in file_list:
                 local_path = os.path.join(root, file_name)
@@ -265,11 +269,15 @@ class Bucket:
                         "type": "local",
                     }
                 except OSError as e:
-                    print(f"Warning: Could not get metadata for {local_path}: {e}")
+                    logger.warning(
+                        f"Warning: Could not get metadata for {local_path}: {e}"
+                    )
                 except Exception as e:
-                    print(f"Warning: Unexpected error processing {local_path}: {e}")
+                    logger.warning(
+                        f"Warning: Unexpected error processing {local_path}: {e}"
+                    )
         if self.verbose:
-            print(f"Found {len(files)} files locally.")
+            logger.info(f"Found {len(files)} files locally.")
         return files
 
     def sync(
@@ -302,8 +310,8 @@ class Bucket:
             source_items = self._list_local(source)
             dest_items = self._list_objects(dest_bucket, dest_prefix)
             if not source_items and not os.path.exists(source):
-                print(
-                    f"Error: Source path {source} not found and is not empty."
+                logger.warning(
+                    f"Warning: Source path {source} not found, but proceeding as it might be an empty source sync."
                 )  # Check needed? list_local handles it.
                 # return # Let it proceed if source is just empty
             if os.path.isfile(source):
@@ -314,7 +322,7 @@ class Bucket:
             sync_direction = "download"
             source_items = self._list_objects(src_bucket, src_prefix)
             if os.path.exists(destination) and not os.path.isdir(destination):
-                print(
+                logger.error(
                     f"Error: Local destination '{destination}' exists but is not a directory."
                 )
                 return
@@ -322,20 +330,20 @@ class Bucket:
             if not dry_run:
                 os.makedirs(destination, exist_ok=True)
             elif not os.path.isdir(destination) and self.verbose:
-                print(f"Dry run: Would create local directory {destination}")
+                logger.info(f"Dry run: Would create local directory {destination}")
 
         elif src_bucket is None and dest_bucket is None:
-            print(
+            logger.error(
                 "Error: Both source and destination are local paths. Use standard file copy tools."
             )
             return
         elif src_bucket is not None and dest_bucket is not None:
-            print(
+            logger.error(
                 "Error: S3 to S3 sync not implemented. Use AWS CLI or S3 Batch Operations."
             )
             return
         else:
-            print("Error: Invalid source or destination path combination.")
+            logger.error("Error: Invalid source or destination path combination.")
            return
 
         actions_to_perform: List[Dict[str, Any]] = []
@@ -414,7 +422,7 @@ class Bucket:
         s3_deletions_batch: List[Dict[str, str]] = []
         if not actions_to_perform:
             if self.verbose:
-                print("Source and destination are already synchronized.")
+                logger.info("Source and destination are already synchronized.")
             # Optional: Add check if source exists if sync_direction == "upload" and not os.path.exists(source):
             return
 
@@ -425,12 +433,14 @@ class Bucket:
                 local_path = action["source_path"]
                 dest_full_path_or_key = action["dest_full_path_or_key"]
                 if not isinstance(dest_full_path_or_key, str):
-                    print(f"ERROR: Invalid dest path: {dest_full_path_or_key}")
+                    logger.error(f"ERROR: Invalid dest path: {dest_full_path_or_key}")
                     continue
                 _, upload_key = self._parse_path(dest_full_path_or_key)
                 target_bucket = action["dest_bucket"]
                 if self.verbose:
-                    print(f"Upload: {local_path} to {dest_full_path_or_key} ({reason})")
+                    logger.info(
+                        f"Upload: {local_path} to {dest_full_path_or_key} ({reason})"
+                    )
                 if not dry_run:
                     if target_bucket and upload_key is not None:
                         try:
@@ -439,11 +449,11 @@ class Bucket:
                             )
                             uploads_done += 1
                         except ClientError as e:
-                            print(f"ERROR uploading {local_path}: {e}")
+                            logger.error(f"ERROR uploading {local_path}: {e}")
                         except Exception as e:
-                            print(f"ERROR uploading {local_path}: {e}")
+                            logger.error(f"ERROR uploading {local_path}: {e}")
                     else:
-                        print(
+                        logger.error(
                             f"ERROR: Invalid S3 target: bucket={target_bucket}, key={upload_key}"
                         )
             elif action["action"] == "download":
@@ -451,11 +461,11 @@ class Bucket:
                 local_path = action["dest_full_path_or_key"]
                 source_bucket_dl = action["source_bucket"]
                 if self.verbose:
-                    print(
+                    logger.info(
                         f"Download: {action['source_path']} to {local_path} ({reason})"
                     )
                 if not isinstance(local_path, str):
-                    print(f"ERROR: Invalid local dest path: {local_path}")
+                    logger.error(f"ERROR: Invalid local dest path: {local_path}")
                     continue
                 if not dry_run:
                     if source_bucket_dl and s3_key_full and local_path:
@@ -467,13 +477,13 @@ class Bucket:
                             )
                             downloads_done += 1
                         except ClientError as e:
-                            print(f"ERROR downloading {s3_key_full}: {e}")
+                            logger.error(f"ERROR downloading {s3_key_full}: {e}")
                         except OSError as e:
-                            print(f"ERROR creating/writing {local_path}: {e}")
+                            logger.error(f"ERROR creating/writing {local_path}: {e}")
                         except Exception as e:
-                            print(f"ERROR downloading {s3_key_full}: {e}")
+                            logger.error(f"ERROR downloading {s3_key_full}: {e}")
                     else:
-                        print(
+                        logger.error(
                             f"ERROR: Invalid download params: bucket={source_bucket_dl}, key={s3_key_full}, local={local_path}"
                         )
             elif action["action"] == "delete_s3":
@@ -481,25 +491,29 @@ class Bucket:
                 target_bucket_del = action["dest_bucket"]
                 if target_bucket_del and s3_key_to_delete:
                     if self.verbose:
-                        print(f"Delete S3: {action['path_to_delete']} ({reason})")
+                        logger.info(f"Delete S3: {action['path_to_delete']} ({reason})")
                     if isinstance(s3_key_to_delete, str):
                         s3_deletions_batch.append({"Key": s3_key_to_delete})
                     else:
-                        print(f"ERROR: Invalid S3 key for deletion: {s3_key_to_delete}")
+                        logger.error(
+                            f"ERROR: Invalid S3 key for deletion: {s3_key_to_delete}"
+                        )
                 else:
-                    print(
+                    logger.error(
                         f"ERROR: Invalid S3 target for deletion: bucket={target_bucket_del}, key={s3_key_to_delete}"
                     )
             elif action["action"] == "delete_local":
                 local_path_to_delete = action["path_to_delete"]
                 if self.verbose:
-                    print(f"Delete Local: {local_path_to_delete} ({reason})")
+                    logger.info(f"Delete Local: {local_path_to_delete} ({reason})")
                 if not dry_run:
                     try:
                         os.remove(local_path_to_delete)
                         deletions_done += 1
                     except OSError as e:
-                        print(f"ERROR deleting local file {local_path_to_delete}: {e}")
+                        logger.error(
+                            f"ERROR deleting local file {local_path_to_delete}: {e}"
+                        )
 
         if s3_deletions_batch:
             target_bucket_del_batch = next(
@@ -523,20 +537,20 @@ class Bucket:
                    if "Errors" in response and response["Errors"]:
                        deleted_count_batch -= len(response["Errors"])
                        for error in response["Errors"]:
-                            print(
+                            logger.error(
                                f"ERROR deleting S3 object {error['Key']}: {error['Code']} - {error['Message']}"
                            )
                except ClientError as e:
-                    print(f"ERROR deleting S3 objects batch: {e}")
+                    logger.error(f"ERROR deleting S3 objects batch: {e}")
                    deleted_count_batch = 0
                except Exception as e:
-                    print(f"ERROR deleting S3 objects batch: {e}")
+                    logger.error(f"ERROR deleting S3 objects batch: {e}")
                    deleted_count_batch = 0
                deletions_done += deleted_count_batch
            elif target_bucket_del_batch:
                deletions_done = len(s3_deletions_batch)
            else:
-                print(
+                logger.warning(
                    "Warning: Could not determine target bucket for S3 deletion batch."
                )
 
@@ -552,24 +566,28 @@ class Bucket:
             delete_local_count = sum(
                 1 for a in actions_to_perform if a["action"] == "delete_local"
             )
-            print("\n--- DRY RUN SUMMARY ---")
+            logger.info("\n--- DRY RUN SUMMARY ---")
             if sync_direction == "upload":
-                print(f"Would upload: {upload_count} file(s)")
+                logger.info(f"Would upload: {upload_count} file(s)")
                 if delete:
-                    print(f"Would delete from S3: {delete_s3_count} object(s)")
+                    logger.info(
+                        f"Would delete from S3: {delete_s3_count} object(s)"
+                    )
             elif sync_direction == "download":
-                print(f"Would download: {download_count} file(s)")
+                logger.info(f"Would download: {download_count} file(s)")
                 if delete:
-                    print(f"Would delete locally: {delete_local_count} file(s)")
-            print("--- END DRY RUN ---")
+                    logger.info(
+                        f"Would delete locally: {delete_local_count} file(s)"
+                    )
+            logger.info("--- END DRY RUN ---")
         else:
             if self.verbose:
                 if sync_direction == "upload":
-                    print(
+                    logger.info(
                         f"Sync completed. Uploaded: {uploads_done} file(s). Deleted from S3: {deletions_done if delete else 0} object(s)."
                     )
                 elif sync_direction == "download":
-                    print(
+                    logger.info(
                         f"Sync completed. Downloaded: {downloads_done} file(s). Deleted locally: {deletions_done if delete else 0} file(s)."
                     )
 
@@ -596,12 +614,12 @@ class Bucket:
         dest_bucket, dest_prefix = self._parse_path(destination)
 
         if src_bucket is None and dest_bucket is None:
-            print(
+            logger.error(
                 "Error: Both source and destination are local. Use 'shutil.copy' or 'shutil.copytree'."
             )
             return
         if src_bucket is not None and dest_bucket is not None:
-            print(
+            logger.error(
                 "Error: S3 to S3 copy not implemented. Use 'aws s3 cp' or boto3 'copy_object'."
             )
             return
@@ -609,7 +627,7 @@ class Bucket:
         # Upload: Local to S3
         if src_bucket is None and dest_bucket is not None:
             if not os.path.exists(source):
-                print(f"Error: Local source path not found: {source}")
+                logger.error(f"Error: Local source path not found: {source}")
                 return
             current_dest_prefix = dest_prefix or ""
 
@@ -621,19 +639,19 @@ class Bucket:
                 else:
                     s3_key = current_dest_prefix
                 if self.verbose:
-                    print(f"Uploading {source} to s3://{dest_bucket}/{s3_key}")
+                    logger.info(f"Uploading {source} to s3://{dest_bucket}/{s3_key}")
                 try:
                     self.client.upload_file(source, dest_bucket, s3_key)
                     if self.verbose:
-                        print("Upload complete.")
+                        logger.info("Upload complete.")
                 except ClientError as e:
-                    print(f"ERROR uploading {source}: {e}")
+                    logger.error(f"ERROR uploading {source}: {e}")
                 except Exception as e:
-                    print(f"ERROR uploading {source}: {e}")
+                    logger.error(f"ERROR uploading {source}: {e}")
 
             elif os.path.isdir(source):
                 if self.verbose:
-                    print(
+                    logger.info(
                         f"Uploading directory {source}/* to s3://{dest_bucket}/{current_dest_prefix}/"
                     )
                 files_uploaded = files_failed = 0
@@ -645,24 +663,26 @@ class Bucket:
                            current_dest_prefix, relative_path
                        ).replace("\\", "/")
                        if self.verbose:
-                            print(
+                            logger.debug(
                                f" Uploading {local_path} to s3://{dest_bucket}/{s3_key}"
                            )
                        try:
                            self.client.upload_file(local_path, dest_bucket, s3_key)
                            files_uploaded += 1
                        except ClientError as e:
-                            print(f" ERROR uploading {local_path}: {e}")
+                            logger.error(f" ERROR uploading {local_path}: {e}")
                            files_failed += 1
                        except Exception as e:
-                            print(f" ERROR uploading {local_path}: {e}")
+                            logger.error(f" ERROR uploading {local_path}: {e}")
                            files_failed += 1
                if self.verbose:
-                    print(
+                    logger.info(
                        f"Directory upload complete. Files uploaded: {files_uploaded}, Failed: {files_failed}"
                    )
            else:
-                print(f"Error: Source {source} is neither a file nor a directory.")
+                logger.error(
+                    f"Error: Source {source} is neither a file nor a directory."
+                )
 
        # Download: S3 to Local
        elif src_bucket is not None and dest_bucket is None:
@@ -686,15 +706,15 @@ class Bucket:
                if e.response["Error"]["Code"] == "404":
                    is_prefix_download = True  # Assume prefix if object not found
                elif e.response["Error"]["Code"] == "NoSuchBucket":
-                    print(f"Error: Source bucket '{src_bucket}' not found.")
+                    logger.error(f"Error: Source bucket '{src_bucket}' not found.")
                    return
                else:
-                    print(
+                    logger.error(
                        f"Error checking S3 source s3://{src_bucket}/{current_src_prefix}: {e}"
                    )
                    return
            except Exception as e:
-                print(
+                logger.error(
                    f"Error checking S3 source s3://{src_bucket}/{current_src_prefix}: {e}"
                )
                return
@@ -711,7 +731,7 @@ class Bucket:
                if parent_dir:
                    os.makedirs(parent_dir, exist_ok=True)
                if self.verbose:
-                    print(
+                    logger.info(
                        f"Downloading s3://{src_bucket}/{single_object_key} to {local_dest_path}"
                    )
                try:
@@ -719,23 +739,23 @@ class Bucket:
                        src_bucket, single_object_key, local_dest_path
                    )
                    if self.verbose:
-                        print("Download complete.")
+                        logger.info("Download complete.")
                except ClientError as e:
-                    print(f"ERROR downloading {single_object_key}: {e}")
+                    logger.error(f"ERROR downloading {single_object_key}: {e}")
                except OSError as e:
-                    print(f"ERROR creating/writing {local_dest_path}: {e}")
+                    logger.error(f"ERROR creating/writing {local_dest_path}: {e}")
                except Exception as e:
-                    print(f"ERROR downloading {single_object_key}: {e}")
+                    logger.error(f"ERROR downloading {single_object_key}: {e}")
 
            elif is_prefix_download:
                if os.path.exists(destination) and not os.path.isdir(destination):
-                    print(
+                    logger.error(
                        f"Error: Local destination '{destination}' exists but is not a directory."
                    )
                    return
                os.makedirs(destination, exist_ok=True)
                if self.verbose:
-                    print(
+                    logger.info(
                        f"Downloading prefix s3://{src_bucket}/{current_src_prefix}/* to {destination}/"
                    )
                paginator = self.client.get_paginator("list_objects_v2")
@@ -780,7 +800,7 @@ class Bucket:
                            )
                            local_dest_dir = os.path.dirname(local_dest_path)
                            if self.verbose:
-                                print(
+                                logger.debug(
                                    f" Downloading s3://{src_bucket}/{s3_key} to {local_dest_path}"
                                )
                            try:
@@ -791,37 +811,37 @@ class Bucket:
                                )
                                files_downloaded += 1
                            except ClientError as e:
-                                print(f" ERROR downloading {s3_key}: {e}")
+                                logger.error(f" ERROR downloading {s3_key}: {e}")
                                files_failed += 1
                            except OSError as e:
-                                print(
+                                logger.error(
                                    f" ERROR creating/writing {local_dest_path}: {e}"
                                )
                                files_failed += 1
                            except Exception as e:
-                                print(f" ERROR downloading {s3_key}: {e}")
+                                logger.error(f" ERROR downloading {s3_key}: {e}")
                                files_failed += 1
                if not found_objects and self.verbose:
-                    print(
+                    logger.warning(
                        f"Warning: No objects found at source prefix s3://{src_bucket}/{current_src_prefix}"
                    )
                if self.verbose:
-                    print(
+                    logger.info(
                        f"Prefix download complete. Files downloaded: {files_downloaded}, Failed: {files_failed}"
                    )
            except ClientError as e:
                if e.response["Error"]["Code"] == "NoSuchBucket":
-                    print(f"Error: Source bucket '{src_bucket}' not found.")
+                    logger.error(f"Error: Source bucket '{src_bucket}' not found.")
                else:
-                    print(
+                    logger.error(
                        f"Error listing objects in s3://{src_bucket}/{current_src_prefix}: {e}"
                    )
            except Exception as e:
-                print(
+                logger.error(
                    f"Error listing objects in s3://{src_bucket}/{current_src_prefix}: {e}"
                )
        else:
-            print("Error: Unknown copy operation type.")
+            logger.error("Error: Unknown copy operation type.")
 
    def check(self, s3_uri: str) -> bool:
        """
@@ -839,7 +859,7 @@
 
        if bucket_name is None or s3_key is None:
            # _parse_path returns None, None if scheme is not 's3'
-            print(f"Error: Invalid S3 URI format: {s3_uri}")
+            logger.error(f"Error: Invalid S3 URI format: {s3_uri}")
            return False
 
        is_prefix = s3_key.endswith("/")
@@ -865,13 +885,13 @@
                return False
            elif e.response["Error"]["Code"] == "NoSuchBucket":
                if self.verbose:
-                    print(
+                    logger.error(
                        f"Error: Bucket '{bucket_name}' not found (from URI: {s3_uri})."
                    )
                return False
            # Handle other potential errors like AccessDenied differently if needed
-            print(f"Error checking {s3_uri}: {e}")
+            logger.error(f"Error checking {s3_uri}: {e}")
            return False
        except Exception as e:
-            print(f"An unexpected error occurred checking {s3_uri}: {e}")
+            logger.error(f"An unexpected error occurred checking {s3_uri}: {e}")
            return False
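Every hunk above applies the same substitution: ad-hoc `print` calls become leveled loguru calls — `logger.info` for progress, `logger.debug` for per-file chatter, `logger.warning` for recoverable conditions, and `logger.error` for failures. A minimal standalone sketch of that mapping (`upload_one` is a hypothetical helper for illustration, not a nebu API):

```python
# Sketch of the print -> loguru mapping applied throughout nebu/data.py.
from loguru import logger


def upload_one(local_path: str, s3_uri: str, verbose: bool = True) -> bool:
    if verbose:
        logger.info(f"Upload: {local_path} to {s3_uri}")  # progress -> info
    try:
        # Stand-in for the real transfer, e.g. client.upload_file(...).
        logger.debug(f"Transferring {local_path}")  # per-file detail -> debug
        return True
    except OSError as e:
        logger.error(f"ERROR uploading {local_path}: {e}")  # failure -> error
        return False
```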
nebu/logging.py ADDED
@@ -0,0 +1,33 @@
+import os
+import sys
+
+from loguru import logger
+
+# --- Loguru Configuration ---
+logger.remove()  # Remove default handler
+
+# Get log level from environment variable, default to INFO, convert to uppercase
+log_level_env = os.environ.get("PYTHON_LOG", "INFO").upper()
+
+# Define valid log levels (uppercase)
+valid_levels = ["TRACE", "DEBUG", "INFO", "SUCCESS", "WARNING", "ERROR", "CRITICAL"]
+
+# Check if the provided level is valid
+if log_level_env not in valid_levels:
+    # Use print to stderr here as logger might not be fully configured yet
+    print(
+        f"Warning: Invalid PYTHON_LOG level '{log_level_env}'. "
+        f"Valid levels are: {valid_levels}. Defaulting to 'INFO'.",
+        file=sys.stderr,
+    )
+    log_level = "INFO"
+else:
+    log_level = log_level_env
+
+# Add new handler with the configured level
+logger.add(sys.stderr, level=log_level)
+
+# --- End Loguru Configuration ---
+
+# Export the configured logger
+__all__ = ["logger"]
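Consumers import the pre-configured logger exactly as the new import at the top of nebu/data.py does. The handler level is read from PYTHON_LOG once, at first import, and unrecognized values fall back to INFO with a warning on stderr. A usage sketch:

```python
import os

# Must be set before nebu.logging is first imported, since the
# handler level is read at import time.
os.environ["PYTHON_LOG"] = "DEBUG"

from nebu.logging import logger

logger.debug("visible because PYTHON_LOG=DEBUG")
logger.info("progress message")
logger.error("failure message")
```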