rdxz2-utill 0.0.2__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
utill/my_cli.py ADDED
@@ -0,0 +1,48 @@
+ import subprocess
+
+ from loguru import logger
+
+
+ def _cli(command: list[str], cwd: str | None = None, shell: bool = False):
+     if shell:  # bash
+         command_str = " ".join(command)
+         process = subprocess.Popen(
+             command_str,
+             stdout=subprocess.PIPE,
+             stderr=subprocess.STDOUT,
+             text=True,
+             shell=True,
+             executable="/bin/bash",
+             cwd=cwd,
+         )
+     else:
+         process = subprocess.Popen(
+             command,
+             stdout=subprocess.PIPE,
+             stderr=subprocess.STDOUT,
+             text=True,
+             cwd=cwd,
+         )
+
+     for stdout_line in iter(process.stdout.readline, ""):
+         yield stdout_line
+
+     process.stdout.close()
+     return_code = process.wait()
+     if return_code != 0:
+         logger.error(f"Command failed with return code {process.returncode}")
+         raise subprocess.CalledProcessError(return_code, command)
+
+
+ def shell(command: list[str], cwd: str | None = None, print_stdout: bool = True):
+     logger.info(f"Executing command: {' '.join(command)}")
+     for res in _cli(command, cwd, shell=True):
+         if print_stdout:
+             logger.info(res.replace("\n", ""))
+
+
+ def bash(command: list[str], cwd: str | None = None, print_stdout: bool = True):
+     logger.info(f"Executing command: {' '.join(command)}")
+     for res in _cli(command, cwd, shell=False):
+         if print_stdout:
+             logger.info(res.replace("\n", ""))
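
A minimal usage sketch for the new CLI helpers, assuming the wheel exposes them as utill.my_cli; the commands shown are placeholders:

    from utill.my_cli import bash, shell

    # bash() runs the argument list directly (no shell) and streams each stdout line to the logger.
    bash(["ls", "-la"], cwd="/tmp")

    # shell() joins the arguments and runs them through /bin/bash, so expansion and pipes work.
    shell(["echo", "$HOME"], print_stdout=True)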
utill/my_compare.py ADDED
@@ -0,0 +1,34 @@
+ import math
+
+
+ def try_float(value) -> float:
+     try:
+         return float(value)
+     except:
+         return value
+
+
+ def same(a, b, float_precision=None) -> tuple[bool, float]:
+     if a is None and b is None:
+         return True, None
+
+     if a is None or b is None:
+         return False, None
+
+     # Compare float
+     a_float = try_float(a)
+     b_float = try_float(b)
+     if isinstance(a_float, float) or isinstance(b_float, float):
+         try:
+             if math.isnan(a_float) and math.isnan(b_float):
+                 return True, None
+
+             if float_precision:
+                 a_float = round(a_float, float_precision)
+                 b_float = round(b_float, float_precision)
+
+             return a_float == b_float, abs(a_float - b_float)
+         except (ValueError, TypeError):
+             raise Exception(f"Can't compare {a} to {b}")
+
+     return str(a) == str(b), None
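
A short sketch of how same() might be called, assuming the module is importable as utill.my_compare; the sample values are illustrative:

    from utill.my_compare import same

    # Numeric-looking inputs are coerced to float; the second element is the absolute difference.
    same("1.0001", 1.0002, float_precision=3)  # -> (True, 0.0)

    # Non-numeric values fall back to string comparison, with no difference reported.
    same("abc", "abc")  # -> (True, None)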
utill/my_const.py CHANGED
@@ -1,4 +1,4 @@
- from enum import Enum
+ from enum import StrEnum
 
 
  class ByteSize:
@@ -8,11 +8,11 @@ class ByteSize:
      TB = 1_099_511_627_776
 
 
- class HttpMethod(Enum):
-     GET = 1
-     POST = 2
-     PUT = 3
-     DELETE = 4
-
-     def __str__(self):
-         return self.name
+ class HttpMethod(StrEnum):
+     GET = "GET"
+     POST = "POST"
+     PUT = "PUT"
+     DELETE = "DELETE"
+     PATCH = "PATCH"
+     HEAD = "HEAD"
+     OPTIONS = "OPTIONS"
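
Because StrEnum members are themselves strings (Python 3.11+), the rewritten HttpMethod no longer needs a __str__ override; a small sketch, assuming the enum is imported from utill.my_const:

    from utill.my_const import HttpMethod

    method = HttpMethod.PATCH
    assert method == "PATCH"         # StrEnum members compare equal to plain strings
    print(f"HTTP {method} request")  # interpolates as "HTTP PATCH request"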
utill/my_csv.py CHANGED
@@ -11,25 +11,32 @@ from .my_file import decompress
 
  def read_header(filename: str):
      filename = os.path.expanduser(filename)
-     with open(filename, 'r') as f:
+     with open(filename, "r") as f:
          csvreader = csv.reader(f)
          return next(csvreader)
 
 
  def write(filename: str, rows: list[tuple], append: bool = False):
      filename = os.path.expanduser(filename)
-     with open(filename, 'a' if append else 'w') as f:
+     with open(filename, "a" if append else "w") as f:
          csvwriter = csv.writer(f)
          csvwriter.writerows(rows)
 
 
- def compress(src_filename: str, keep: bool = False, max_size_bytes=ByteSize.GB, src_fopen=None, header=None, file_count=1):
+ def compress(
+     src_filename: str,
+     keep: bool = False,
+     max_size_bytes=ByteSize.GB,
+     src_fopen=None,
+     header=None,
+     file_count=1,
+ ):
      src_filename = os.path.expanduser(src_filename)
      current_size = 0
      dst_filename = f'{src_filename}_part{str(file_count).rjust(6, "0")}.gz'
      os.remove(dst_filename) if os.path.exists(dst_filename) else None
-     logger.debug(f'📄 Compress csv {src_filename} --> {dst_filename}')
-     gz = gzip.open(dst_filename, 'wt')
+     logger.debug(f"📄 Compress csv {src_filename} --> {dst_filename}")
+     gz = gzip.open(dst_filename, "wt")
 
      src_fopen = src_fopen or open(src_filename)
      header = header or src_fopen.readline()
@@ -42,14 +49,16 @@ def compress(src_filename: str, keep: bool = False, max_size_bytes=ByteSize.GB,
              break
 
          gz.write(line)
-         current_size += len(line.encode('utf-8'))
+         current_size += len(line.encode("utf-8"))
 
          if current_size >= max_size_bytes:
              gz.close()
              yield dst_filename
 
              file_count += 1
-             yield from compress(src_filename, keep, max_size_bytes, src_fopen, header, file_count)
+             yield from compress(
+                 src_filename, keep, max_size_bytes, src_fopen, header, file_count
+             )
              return
 
      gz.close()
@@ -57,34 +66,46 @@ def compress(src_filename: str, keep: bool = False, max_size_bytes=ByteSize.GB,
      yield dst_filename
 
 
- def combine(src_filenames: list[str], dst_filename: str) -> None:
-     csv.field_size_limit(min(sys.maxsize, 2147483646))  # FIX: _csv.Error: field larger than field limit (131072)
+ def combine(
+     src_filenames: list[str],
+     dst_filename: str,
+     gzip: bool = False,
+     delete: bool = False,
+ ) -> None:
+     csv.field_size_limit(
+         min(sys.maxsize, 2147483646)
+     )  # FIX: _csv.Error: field larger than field limit (131072)
 
-     if not dst_filename.endswith('.csv'):
-         raise ValueError('Output filename must ends with \'.csv\'!')
+     if not dst_filename.endswith(".csv"):
+         raise ValueError("Output filename must ends with '.csv'!")
 
-     first_file = True
-     with open(dst_filename, 'w') as fout:
+     first_src_file = True
+     with open(dst_filename, "w") as fout:
          csvwriter = csv.writer(fout)
 
          for src_filename in src_filenames:
              src_filename = os.path.expanduser(src_filename)
 
              # Decompress gzipped csv
-             if src_filename.endswith('.csv.gz'):
+             if gzip:
                  src_filename = decompress(src_filename)
 
-             # Copy
-             with open(src_filename, 'r') as fin:
+             # Write content into file
+             with open(src_filename, "r") as fin:
                  csvreader = csv.reader(fin)
 
-                 # Copy the header if this is the first file
-                 if first_file:
+                 # Write header only at first file
+                 if first_src_file:
                      csvwriter.writerow(next(csvreader))
-                 # Else, skip the header
+                     first_src_file = False
                  else:
                      next(csvreader)
 
+                 # Write body
                  [csvwriter.writerow(row) for row in csvreader]
 
-             logger.info(f'✅ Combine {src_filename}')
+             logger.debug(f"Combine {src_filename}")
+
+             if delete:
+                 os.remove(src_filename)
+                 logger.debug(f"Delete {src_filename}")
utill/my_datetime.py CHANGED
@@ -1,4 +1,6 @@
- from datetime import date, datetime, timedelta
+ from datetime import date
+ from datetime import datetime
+ from datetime import timedelta
  from enum import Enum
 
 
@@ -8,28 +10,35 @@ class Level(Enum):
 
 
  def get_current_date_str(use_separator: bool = False) -> str:
-     return datetime.now().strftime('%Y-%m-%d' if use_separator else '%Y%m%d')
+     return datetime.now().strftime("%Y-%m-%d" if use_separator else "%Y%m%d")
 
 
- def current_datetime_str(use_separator: bool = False) -> str:
-     return datetime.now().strftime('%Y-%m-%d %H:%M:%S' if use_separator else '%Y%m%d%H%M%S')
+ def get_current_datetime_str(use_separator: bool = False) -> str:
+     return datetime.now().strftime(
+         "%Y-%m-%d %H:%M:%S" if use_separator else "%Y%m%d%H%M%S"
+     )
 
 
  def get_month_first_and_last_day(string: str) -> tuple:
      try:
-         dt = datetime.strptime(string, '%Y-%m')
+         dt = datetime.strptime(string, "%Y-%m")
      except ValueError:
-         dt = datetime.strptime(string, '%Y-%m-%d').replace(day=1)
+         dt = datetime.strptime(string, "%Y-%m-%d").replace(day=1)
 
      return (dt, (dt + timedelta(days=32)).replace(day=1) - timedelta(days=1))
 
 
- def generate_dates(start_date: date | str, end_date: date | str, level: Level, is_output_strings: bool = False):
+ def generate_dates(
+     start_date: date | str,
+     end_date: date | str,
+     level: Level,
+     is_output_strings: bool = False,
+ ):
      # Auto convert strings
      if type(start_date) == str:
-         start_date = datetime.strptime(start_date, '%Y-%m-%d').date()
+         start_date = datetime.strptime(start_date, "%Y-%m-%d").date()
      if type(end_date) == str:
-         end_date = datetime.strptime(end_date, '%Y-%m-%d').date()
+         end_date = datetime.strptime(end_date, "%Y-%m-%d").date()
 
      # Auto convert datetime
      if type(start_date) == datetime:
@@ -38,7 +47,9 @@ def generate_dates(start_date: date | str, end_date: date | str, level: Level, i
          end_date = end_date.date()
 
      if start_date > end_date:
-         raise ValueError(f'start_date \'{start_date}\' cannot be larger than end_date \'{end_date}\'')
+         raise ValueError(
+             f"start_date '{start_date}' cannot be larger than end_date '{end_date}'"
+         )
 
      dates: list[date] = []
 
@@ -55,9 +66,11 @@ def generate_dates(start_date: date | str, end_date: date | str, level: Level, i
                  dates.append(end_date)
                  end_date = end_date - timedelta(days=1)
          case _:
-             raise ValueError(f'level \'{level}\' not recognized. available levels are: \'day\', \'month\'')
+             raise ValueError(
+                 f"level '{level}' not recognized. available levels are: 'day', 'month'"
+             )
 
      if is_output_strings:
-         return sorted([date.strftime('%Y-%m-%d') for date in dates])
+         return sorted([date.strftime("%Y-%m-%d") for date in dates])
      else:
          return sorted(dates)
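
A sketch of the renamed get_current_datetime_str() and the reformatted generate_dates(); Level.DAY is assumed here, since the diff only shows that Level is an Enum covering 'day' and 'month' levels:

    from utill.my_datetime import Level, generate_dates, get_current_datetime_str

    get_current_datetime_str(use_separator=True)  # e.g. "2024-01-31 09:15:00"

    # Every date between the bounds, returned as sorted "YYYY-MM-DD" strings.
    generate_dates("2024-01-01", "2024-01-03", Level.DAY, is_output_strings=True)
    # -> ['2024-01-01', '2024-01-02', '2024-01-03']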
utill/my_encryption.py CHANGED
@@ -5,16 +5,26 @@ from loguru import logger
 
 
  def __fernet_encrypt_or_decrypt(encrypt: bool, string: str, password: str):
-     return Fernet(password).encrypt(string.encode()) if encrypt else Fernet(password).encrypt(string.encode())
-
-
- def __file_encrypt_or_decrypt(encrypt: bool, src_filename: str, password: str, dst_filename: str = None, overwrite: bool = False):
+     return (
+         Fernet(password).encrypt(string.encode())
+         if encrypt
+         else Fernet(password).encrypt(string.encode())
+     )
+
+
+ def __file_encrypt_or_decrypt(
+     encrypt: bool,
+     src_filename: str,
+     password: str,
+     dst_filename: str = None,
+     overwrite: bool = False,
+ ):
      src_filename = os.path.expanduser(src_filename)
 
      if not os.path.exists(src_filename):
-         return ValueError(f'Source file not exists: {src_filename}')
+         return ValueError(f"Source file not exists: {src_filename}")
 
-     with open(src_filename, 'r') as fr:
+     with open(src_filename, "r") as fr:
          # If destination file is not specified, return the encrypted string
          if not dst_filename:
              return __fernet_encrypt_or_decrypt(encrypt, fr.read(), password)
@@ -25,23 +35,31 @@ def __file_encrypt_or_decrypt(encrypt: bool, src_filename: str, password: str, d
          # Destination file exists checker
          if os.path.exists(dst_filename):
              if overwrite:
-                 return ValueError(f'Destination file exists: {dst_filename}')
+                 return ValueError(f"Destination file exists: {dst_filename}")
              else:
                  os.remove(dst_filename)
 
-         with open(dst_filename, 'w') as fw:
+         with open(dst_filename, "w") as fw:
              fw.write(__fernet_encrypt_or_decrypt(encrypt, fr.read()), password)
 
-         logger.info(f'Encrypted into {dst_filename}')
+         logger.info(f"Encrypted into {dst_filename}")
          return dst_filename
 
 
- def encrypt_file(src_filename: str, password: str, dst_filename: str = None, overwrite: bool = False) -> str:
-     return __file_encrypt_or_decrypt(True, src_filename, password, dst_filename, overwrite)
+ def encrypt_file(
+     src_filename: str, password: str, dst_filename: str = None, overwrite: bool = False
+ ) -> str:
+     return __file_encrypt_or_decrypt(
+         True, src_filename, password, dst_filename, overwrite
+     )
 
 
- def decrypt_file(src_filename: str, password: str, dst_filename: str = None, overwrite: bool = False) -> str:
-     return __file_encrypt_or_decrypt(False, src_filename, password, dst_filename, overwrite)
+ def decrypt_file(
+     src_filename: str, password: str, dst_filename: str = None, overwrite: bool = False
+ ) -> str:
+     return __file_encrypt_or_decrypt(
+         False, src_filename, password, dst_filename, overwrite
+     )
 
 
  def encrypt_string(string: str, password: str) -> str:
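
A sketch of driving the reformatted file helpers, assuming password is a Fernet key as cryptography.fernet requires; the paths are placeholders:

    from cryptography.fernet import Fernet
    from utill.my_encryption import encrypt_file

    key = Fernet.generate_key()  # Fernet expects a 32-byte url-safe base64 key, not a passphrase

    # With no dst_filename the encrypted content is returned;
    # with one, it is written to that path and the path is returned.
    token = encrypt_file("~/secrets.txt", key)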
utill/my_env.py CHANGED
@@ -1,18 +1,23 @@
  import os
  import shutil
+ from typing import Optional
 
  from loguru import logger
  from pydantic_settings import BaseSettings
- from typing import Optional
 
  from .my_input import ask_yes_no
 
- ENV_DIR = os.path.expanduser(os.path.join('~', '.utill'))
- ENV_FILE = os.path.join(ENV_DIR, 'env')
 
- TEMPLATE_DIR = 'templates'
- TEMPLATE_PG_FILENAME = os.path.join(os.path.dirname(__file__), TEMPLATE_DIR, 'pg.json')  # PostgreSQL connections
- TEMPLATE_MB_FILENAME = os.path.join(os.path.dirname(__file__), TEMPLATE_DIR, 'mb.json')  # Metabase connections
+ ENV_DIR = os.path.expanduser(os.path.join("~", ".utill"))
+ ENV_FILE = os.path.join(ENV_DIR, "env")
+
+ TEMPLATE_DIR = "templates"
+ TEMPLATE_PG_FILENAME = os.path.join(
+     os.path.dirname(__file__), TEMPLATE_DIR, "pg.json"
+ )  # PostgreSQL connections
+ TEMPLATE_MB_FILENAME = os.path.join(
+     os.path.dirname(__file__), TEMPLATE_DIR, "mb.json"
+ )  # Metabase connections
 
  PG_FILENAME = os.path.join(ENV_DIR, os.path.basename(TEMPLATE_PG_FILENAME))
  MB_FILENAME = os.path.join(ENV_DIR, os.path.basename(TEMPLATE_MB_FILENAME))
@@ -24,39 +29,45 @@ if not os.path.exists(ENV_DIR):
 
  def init_pg_file():
      if os.path.exists(PG_FILENAME):
-         if ask_yes_no(f'PostgreSQL connection file exists: {PG_FILENAME}, overwrite?'):
+         if ask_yes_no(f"PostgreSQL connection file exists: {PG_FILENAME}, overwrite?"):
              shutil.copy(TEMPLATE_PG_FILENAME, PG_FILENAME)
-             logger.warning(f'PostgreSQL connection file overwritten! {PG_FILENAME}')
+             logger.warning(f"PostgreSQL connection file overwritten! {PG_FILENAME}")
          else:
              return
 
      shutil.copy(TEMPLATE_PG_FILENAME, PG_FILENAME)
-     logger.info(f'PostgreSQL connection file created: {PG_FILENAME}')
+     logger.info(f"PostgreSQL connection file created: {PG_FILENAME}")
 
 
  def init_mb_file():
      if os.path.exists(MB_FILENAME):
-         if ask_yes_no(f'Metabase connection file exists: {MB_FILENAME}, overwrite?'):
+         if ask_yes_no(f"Metabase connection file exists: {MB_FILENAME}, overwrite?"):
              shutil.copy(TEMPLATE_MB_FILENAME, MB_FILENAME)
-             logger.warning(f'Metabase connection file overwritten! {MB_FILENAME}')
+             logger.warning(f"Metabase connection file overwritten! {MB_FILENAME}")
          else:
              return
 
      shutil.copy(TEMPLATE_MB_FILENAME, MB_FILENAME)
-     logger.info(f'Metabase connection file created: {MB_FILENAME}')
+     logger.info(f"Metabase connection file created: {MB_FILENAME}")
 
 
  class Envs(BaseSettings):
 
      GCP_PROJECT_ID: Optional[str] = None
+     GCP_REGION: Optional[str] = None
      GCS_BUCKET: Optional[str] = None
 
      def set_var(self, k: str, v: str):
          setattr(self, k, v)
 
      def write(self):
-         with open(ENV_FILE, 'w') as f:
-             data = '\n'.join(['{}=\"{}\"'.format(k, str(getattr(self, k)).replace('\"', '\\\"')) for k in self.model_fields.keys()])
+         with open(ENV_FILE, "w") as f:
+             data = "\n".join(
+                 [
+                     '{}="{}"'.format(k, str(getattr(self, k)).replace('"', '\\"'))
+                     for k in self.model_fields.keys()
+                 ]
+             )
              f.write(data)
 
      class Config:
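
A sketch of the Envs settings model with the new GCP_REGION field, assuming pydantic-settings populates it from the environment or the env file referenced in Config; the region value is illustrative:

    from utill.my_env import Envs

    envs = Envs()
    envs.set_var("GCP_REGION", "asia-southeast2")
    envs.write()  # persists KEY="value" lines into ~/.utill/env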
utill/my_file.py CHANGED
@@ -7,12 +7,12 @@ from loguru import logger
 
  def compress(src_file: str, keep: bool = False):
      src_file = os.path.expanduser(src_file)
-     dst_file = src_file + '.gz'
+     dst_file = src_file + ".gz"
 
      os.remove(dst_file) if os.path.exists(dst_file) else None
-     logger.debug(f'📄 Compress {dst_file} --> {dst_file}')
-     with open(src_file, 'rb') as f_in:
-         with gzip.open(dst_file, 'wb') as f_out:
+     logger.debug(f"📄 Compress {dst_file} --> {dst_file}")
+     with open(src_file, "rb") as f_in:
+         with gzip.open(dst_file, "wb") as f_out:
              shutil.copyfileobj(f_in, f_out)
 
      os.remove(src_file) if not keep else None
@@ -21,19 +21,19 @@ def compress(src_file: str, keep: bool = False):
 
 
  def decompress(src_file: str, keep: bool = False):
-     if not src_file.endswith('.gz'):
-         raise ValueError('File name not ends with .gz!')
+     if not src_file.endswith(".gz"):
+         raise ValueError("File name not ends with .gz!")
 
      src_file = os.path.expanduser(src_file)
-     dst_file = src_file.removesuffix('.gz')
+     dst_file = src_file.removesuffix(".gz")
 
      os.remove(dst_file) if os.path.exists(dst_file) else None
-     logger.debug(f'📄 Decompress {src_file} --> {dst_file}')
-     with gzip.open(src_file, 'rb') as f_in:
-         with open(dst_file, 'wb') as f_out:
+     logger.debug(f"Decompress {src_file} to {dst_file}")
+     with gzip.open(src_file, "rb") as f_in:
+         with open(dst_file, "wb") as f_out:
              shutil.copyfileobj(f_in, f_out)
 
-     keep or os.remove(src_file)
+     os.remove(src_file) if not keep else None
 
      return dst_file
 
@@ -45,15 +45,17 @@ def make_sure_directory_exists(dirname: str):
 
  def make_sure_path_is_directory(path: str):
      if not path.endswith(os.sep):
-         raise ValueError(f'Please specify directory name ending with \'{os.sep}\' character, example for Linux: \'/home/my_username/Downloads/my_folder/\'!')
+         raise ValueError(
+             f"Please specify directory name ending with '{os.sep}' character, example for Linux: '/home/my_username/Downloads/my_folder/'!"
+         )
 
 
  def read_last_line(filename: str) -> str:
      filename = os.path.expanduser(filename)
-     with open(filename, 'rb') as f:
+     with open(filename, "rb") as f:
          try:  # Catch OSError in case of a one line file
              f.seek(-2, os.SEEK_END)
-             while f.read(1) != b'\n':
+             while f.read(1) != b"\n":
                  f.seek(-2, os.SEEK_CUR)
          except OSError:
              f.seek(0)
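
A usage sketch for the gzip helpers, assuming the module is importable as utill.my_file; the paths are placeholders:

    from utill.my_file import compress, decompress, read_last_line

    compress("~/exports/data.csv", keep=True)       # writes ~/exports/data.csv.gz next to the source
    csv_path = decompress("~/exports/data.csv.gz")  # returns the decompressed path; drops the .gz by default
    print(read_last_line(csv_path))                 # reads only the tail of the file, seeking from the end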