rdxz2-utill 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rdxz2-utill might be problematic. See the release's security report for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: rdxz2-utill
3
- Version: 0.0.3
3
+ Version: 0.0.5
4
4
  Summary: Your daily Python utility
5
5
  Author-email: Richard Dharmawan <richard.dharmawan@gmail.com>
6
6
  License: MIT License
@@ -1,18 +1,19 @@
1
- rdxz2_utill-0.0.3.dist-info/licenses/LICENSE,sha256=PF9CUvzP8XFYopEAzrMzSCovF7RdBdscPqJCDC6KjPc,1073
1
+ rdxz2_utill-0.0.5.dist-info/licenses/LICENSE,sha256=PF9CUvzP8XFYopEAzrMzSCovF7RdBdscPqJCDC6KjPc,1073
2
2
  utill/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
- utill/my_bq.py,sha256=ZsjRbrbczGQ70CavXfyeAPaWT1OW5LyRFBxyusNZ0cc,14701
3
+ utill/my_bq.py,sha256=V2MVVciiAR8n840Cr7hFeTuisCPEQDyhjLMM6CCwQFE,14676
4
+ utill/my_compare.py,sha256=oGK4fEAQYZqy2FH-jqRSvxANL9SsEoEaq5GOCMGoNXI,918
4
5
  utill/my_const.py,sha256=88dOqn6NPQ5-hfRqdkew5POoAIyO91XXOGvN76oNsdo,251
5
- utill/my_csv.py,sha256=76Q7IM7T_WYF5SybObN_MbkPqycirX4FnLd0DX3Kdyg,2742
6
+ utill/my_csv.py,sha256=svgu93R0pP7UW0B58eJMi0vuJnYhqMtafzCsTIk4yUU,2781
6
7
  utill/my_datetime.py,sha256=KEZTplLk3tgVqqC3wClXFcsF_zo40fma_rtPg4kSJHc,2125
7
8
  utill/my_dict.py,sha256=jPaPfdn4WYpm0uIBPiYFinpHhx1jXpFVDJ9npmvxGZQ,391
8
9
  utill/my_encryption.py,sha256=SCF7PPur39cW4RHidsRhw-9BZP-ymUH-6LZ9nAHJDsY,2105
9
10
  utill/my_env.py,sha256=mREys72Ybg2p9p2s7ApOt0s_6F5-qxR8FyYEcSJ8pmU,2093
10
11
  utill/my_file.py,sha256=H3QmIOwubQCUMoOuk7jwf6AnqsljWZIuM7OjelyZby4,1865
11
- utill/my_gcs.py,sha256=VY2CXQbzBUhX-HunvAZ_y4E19eiuZ1b3TF33bUkJfp4,3953
12
+ utill/my_gcs.py,sha256=u9rZr4e6XLLFK1QC9JknMwBeNX5MWOnVq1W50U5Jt14,4032
12
13
  utill/my_input.py,sha256=OyKLoutXpwISReltuL_Gw2oojv16tYWJqQpqabBOQx4,350
13
14
  utill/my_json.py,sha256=WgW6mavGhfs4h1N5XbhsDnRk2dbh_ttJWdJUj4iWDN4,1473
14
15
  utill/my_mb.py,sha256=3_A5kXHgnkxGbd38vK5t5MfFcj84lohjS7C2OtlSo30,14841
15
- utill/my_pg.py,sha256=udsqNok7dOFz1rO-hQhqnEj8PpH9oMdkyyjGAdLRS-w,6554
16
+ utill/my_pg.py,sha256=_kBNfm2SoUFXv3R3lFMC1RmSG8pWYnojvnfpNGYEAhU,6682
16
17
  utill/my_queue.py,sha256=hINP4_yjmboSjHgo1J3CtPm2X9SE3HfczyED3ip7nfk,1930
17
18
  utill/my_string.py,sha256=pINYFR1ligTyVZYzV8P_FolCsZQwYE1jaFNTuQ3XS_8,833
18
19
  utill/my_style.py,sha256=Wy6j4WL9RgGeX6cS9hhlOrufc9UC4UPTQ5UJa0ZJ3Yo,900
@@ -25,10 +26,10 @@ utill/cmd/_enc.py,sha256=DBy3Iwa5DTtww7lgHPRLEilrYPrWDG1vRv5PO-YzNO8,997
25
26
  utill/cmd/_main.py,sha256=UJ_XTIGDO9XPIypgHhS81SJQ_8qy8JOyw98Or0Nb2x8,273
26
27
  utill/cmd/_pg.py,sha256=RVxEiSifyIwMDYDM69vt6WSLdVDr1cMzY6r4T2PzNRA,492
27
28
  utill/cmd/utill.py,sha256=TlHfiwOUcK1m58PrRCjX9sARiPYZUsoTk-KOTCOz1vM,3558
28
- utill/templates/mb.json,sha256=BPnVhMG2FgcxnThYp04Vn5zSQI0G-yQv99qTPNvmSok,44
29
- utill/templates/pg.json,sha256=49c8AoGznP-omKGEgWlIWFpj7qIjeOC5Nf5k0DxlbHE,256
30
- rdxz2_utill-0.0.3.dist-info/METADATA,sha256=WA0g56M8_ps8WcPoISrxCE-pG8PLURq3hPez2Pmc9qs,4489
31
- rdxz2_utill-0.0.3.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
32
- rdxz2_utill-0.0.3.dist-info/entry_points.txt,sha256=9n5NWz5Wi9jDvYhB_81_4icgT5xABZ-QivHD8ibcafg,47
33
- rdxz2_utill-0.0.3.dist-info/top_level.txt,sha256=tuAYZoCsr02JYbpZj7I6fl1IIo53v3GG0uoj-_fINVk,6
34
- rdxz2_utill-0.0.3.dist-info/RECORD,,
29
+ utill/templates/mb.json,sha256=M46ZHSaSh4rbD_KGUViGr2B2ZV8_PC-O5Evqi35JK5g,59
30
+ utill/templates/pg.json,sha256=LkJt0VV3zcyt7Tpn6gulsoVQgUc-9uImXOStvzu8cdU,271
31
+ rdxz2_utill-0.0.5.dist-info/METADATA,sha256=64FDWzC8IqvqPrKxrmFZ8gKCZGIqpw70L0ns-_DeVnM,4489
32
+ rdxz2_utill-0.0.5.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
33
+ rdxz2_utill-0.0.5.dist-info/entry_points.txt,sha256=9n5NWz5Wi9jDvYhB_81_4icgT5xABZ-QivHD8ibcafg,47
34
+ rdxz2_utill-0.0.5.dist-info/top_level.txt,sha256=tuAYZoCsr02JYbpZj7I6fl1IIo53v3GG0uoj-_fINVk,6
35
+ rdxz2_utill-0.0.5.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (79.0.0)
2
+ Generator: setuptools (80.7.1)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
utill/my_bq.py CHANGED
@@ -12,7 +12,6 @@ from .my_const import ByteSize
12
12
  from .my_csv import read_header, combine as csv_combine, compress
13
13
  from .my_datetime import current_datetime_str
14
14
  from .my_env import envs
15
- from .my_file import make_sure_path_is_directory
16
15
  from .my_gcs import GCS
17
16
  from .my_queue import ThreadingQ
18
17
  from .my_string import replace_nonnumeric
@@ -248,9 +247,10 @@ class BQ():
248
247
  def download_csv(self, query: str, dst_filename: str, combine: bool = True, pre_query: str = None):
249
248
  if not dst_filename.endswith('.csv'):
250
249
  raise ValueError('Destination filename must ends with .csv!')
250
+
251
+ dst_filename = os.path.expanduser(dst_filename)
251
252
 
252
- dirname = os.path.dirname(dst_filename)
253
- make_sure_path_is_directory(dirname)
253
+ dirname = dst_filename.removesuffix('.csv')
254
254
 
255
255
  # Remove & recreate existing folder
256
256
  if os.path.exists(dirname):
utill/my_compare.py ADDED
@@ -0,0 +1,34 @@
1
+ import math
2
+
3
+
4
+ def try_float(value) -> float:
5
+ try:
6
+ return float(value)
7
+ except:
8
+ return value
9
+
10
+
11
+ def same(a, b, float_precision=None) -> tuple[bool, float]:
12
+ if a is None and b is None:
13
+ return True, None
14
+
15
+ if a is None or b is None:
16
+ return False, None
17
+
18
+ # Compare float
19
+ a_float = try_float(a)
20
+ b_float = try_float(b)
21
+ if isinstance(a_float, float) or isinstance(b_float, float):
22
+ try:
23
+ if math.isnan(a_float) and math.isnan(b_float):
24
+ return True, None
25
+
26
+ if float_precision:
27
+ a_float_rounded = round(a_float, float_precision)
28
+ b_float_rounded = round(b_float, float_precision)
29
+
30
+ return a_float_rounded == b_float_rounded, abs(a_float - b_float)
31
+ except (ValueError, TypeError):
32
+ raise Exception(f'Can\'t compare {a} to {b}')
33
+
34
+ return str(a) == str(b), None
utill/my_csv.py CHANGED
@@ -81,6 +81,7 @@ def combine(src_filenames: list[str], dst_filename: str) -> None:
81
81
  # Copy the header if this is the first file
82
82
  if first_file:
83
83
  csvwriter.writerow(next(csvreader))
84
+ first_file = False
84
85
  # Else, skip the header
85
86
  else:
86
87
  next(csvreader)
utill/my_gcs.py CHANGED
@@ -30,6 +30,8 @@ class GCS:
30
30
  return f'{self.base_path}/{path}' if self.base_path else path
31
31
 
32
32
  def change_bucket(self, bucket_name: str):
33
+ if not bucket_name:
34
+ raise ValueError('Bucket name needed')
33
35
  self.bucket = self.client.bucket(bucket_name)
34
36
  logger.debug(f'Change bucket to {self.bucket.name}')
35
37
 
utill/my_pg.py CHANGED
@@ -99,59 +99,61 @@ class PG:
99
99
  for data in copy:
100
100
  f.write(data)
101
101
 
102
- def pg_to_pg(self, pg: "PG", source_table: str, target_table: str, cols: list[str] = None) -> None:
102
+ def pg_to_pg(self, pg: "PG", src_table_name: str, dst_table_name: str, cols: list[str] = None) -> None:
103
103
  tmp_filename = generate_random_string() + '.csv'
104
104
  cols_str = ','.join([f'"{x}"' for x in cols]) if (cols is not None and cols != []) else '*'
105
105
  try:
106
- self.download_csv(f'SELECT {cols_str} FROM {source_table}', tmp_filename)
107
- pg.upload_csv(tmp_filename, target_table)
106
+ self.download_csv(f'SELECT {cols_str} FROM {src_table_name}', tmp_filename)
107
+ pg.upload_csv(tmp_filename, dst_table_name)
108
108
  except:
109
109
  raise
110
110
  finally:
111
111
  os.remove(tmp_filename) if os.path.exists(tmp_filename) else None
112
112
 
113
113
  def check_table_existence(self, table_name: str) -> bool:
114
- if not self.execute_query('''SELECT count(1) AS "cnt" FROM "information_schema"."tables" WHERE "table_schema" || '.' || "table_name" = '%s';''', table_name).fetchone()['cnt']:
114
+ if not self.execute_query('''SELECT count(1) AS "cnt" FROM "information_schema"."tables" WHERE "table_schema" || '.' || "table_name" = %s;''', table_name).fetchone()[0]:
115
115
  raise Exception(f'Target table \'{table_name}\' not created, please create it first!')
116
116
 
117
- def upload_tuples(self, cols: list[str], tuples: list[tuple], table_name: str) -> None:
118
- self.check_table_existence(table_name)
117
+ def upload_tuples(self, cols: list[str], src_tuples: list[tuple], src_table_name: str) -> None:
118
+ self.check_table_existence(src_table_name)
119
119
 
120
120
  cols_str = ','.join([f'"{x}"' for x in cols])
121
- query = f'''COPY {table_name}({cols_str}) FROM STDIN'''
121
+ query = f'''COPY {src_table_name}({cols_str}) FROM STDIN'''
122
122
  logger.debug(f'🔎 Query:\n{query}')
123
123
  with self.cursor.copy(query) as copy:
124
- for row in tuples:
124
+ for row in src_tuples:
125
125
  copy.write_row(row)
126
126
 
127
- def upload_list_of_dict(self, data: list[dict], table_name: str) -> None:
128
- self.check_table_existence(table_name)
127
+ def upload_list_of_dict(self, src_data: list[dict], dst_table_name: str) -> None:
128
+ self.check_table_existence(dst_table_name)
129
129
 
130
- if len(data) == 0:
130
+ if len(src_data) == 0:
131
131
  raise ValueError('No data to upload!')
132
132
 
133
- cols = data[0].keys()
133
+ cols = src_data[0].keys()
134
134
  cols_str = ','.join([f'"{x}"' for x in cols])
135
- query = f'''COPY {table_name}({cols_str}) FROM STDIN'''
135
+ query = f'''COPY {dst_table_name}({cols_str}) FROM STDIN'''
136
136
  logger.debug(f'🔎 Query:\n{query}')
137
137
  with self.cursor.copy(query) as copy:
138
- for row in data:
138
+ for row in src_data:
139
139
  copy.write_row(tuple(row[col] for col in cols))
140
140
 
141
- def upload_csv(self, file_path: str, table_name: str) -> None:
142
- self.check_table_existence(table_name)
141
+ def upload_csv(self, src_filename: str, dst_table_name: str) -> None:
142
+ src_filename = os.path.expanduser(src_filename)
143
143
 
144
- cols_str = ','.join([f'"{x}"' for x in next(csv.reader(open(file_path, 'r')))])
144
+ self.check_table_existence(dst_table_name)
145
+
146
+ cols_str = ','.join([f'"{x}"' for x in next(csv.reader(open(src_filename, 'r')))])
145
147
  query = dedent(
146
148
  f'''
147
- COPY {table_name}({cols_str})
149
+ COPY {dst_table_name}({cols_str})
148
150
  FROM STDIN
149
151
  DELIMITER ','
150
152
  CSV HEADER;
151
153
  '''
152
154
  )
153
155
  logger.debug(f'🔎 Query:\n{query}')
154
- with open(os.path.expanduser(file_path), 'r') as f:
156
+ with open(os.path.expanduser(src_filename), 'r') as f:
155
157
  with self.cursor.copy(query) as copy:
156
158
  while data := f.read(1024):
157
159
  copy.write(data)
utill/templates/mb.json CHANGED
@@ -1,4 +1,5 @@
1
1
  {
2
2
  "base_url": "xxx",
3
- "api_key": "xxx"
3
+ "api_key": "xxx",
4
+ "end": true
4
5
  }
utill/templates/pg.json CHANGED
@@ -9,5 +9,6 @@
9
9
  "db": "xxx",
10
10
  "username": "xxx",
11
11
  "password": "xxx"
12
- }
12
+ },
13
+ "end": true
13
14
  }