rdxz2-utill 0.0.3__tar.gz → 0.0.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of rdxz2-utill might be problematic.
- {rdxz2_utill-0.0.3/src/rdxz2_utill.egg-info → rdxz2_utill-0.0.5}/PKG-INFO +1 -1
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/pyproject.toml +1 -1
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5/src/rdxz2_utill.egg-info}/PKG-INFO +1 -1
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/rdxz2_utill.egg-info/SOURCES.txt +1 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_bq.py +3 -3
- rdxz2_utill-0.0.5/src/utill/my_compare.py +34 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_csv.py +1 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_gcs.py +2 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_pg.py +21 -19
- rdxz2_utill-0.0.5/src/utill/templates/mb.json +5 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/templates/pg.json +2 -1
- rdxz2_utill-0.0.3/src/utill/templates/mb.json +0 -4
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/LICENSE +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/README.md +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/setup.cfg +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/rdxz2_utill.egg-info/dependency_links.txt +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/rdxz2_utill.egg-info/entry_points.txt +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/rdxz2_utill.egg-info/requires.txt +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/rdxz2_utill.egg-info/top_level.txt +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/__init__.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/cmd/__init__.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/cmd/_bq.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/cmd/_conf.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/cmd/_enc.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/cmd/_main.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/cmd/_pg.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/cmd/utill.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_const.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_datetime.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_dict.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_encryption.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_env.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_file.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_input.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_json.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_mb.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_queue.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_string.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_style.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_tunnel.py +0 -0
- {rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_xlsx.py +0 -0
{rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_bq.py

@@ -12,7 +12,6 @@ from .my_const import ByteSize
 from .my_csv import read_header, combine as csv_combine, compress
 from .my_datetime import current_datetime_str
 from .my_env import envs
-from .my_file import make_sure_path_is_directory
 from .my_gcs import GCS
 from .my_queue import ThreadingQ
 from .my_string import replace_nonnumeric
@@ -248,9 +247,10 @@ class BQ():
     def download_csv(self, query: str, dst_filename: str, combine: bool = True, pre_query: str = None):
         if not dst_filename.endswith('.csv'):
             raise ValueError('Destination filename must ends with .csv!')
+
+        dst_filename = os.path.expanduser(dst_filename)
 
-        dirname =
-        make_sure_path_is_directory(dirname)
+        dirname = dst_filename.removesuffix('.csv')
 
         # Remove & recreate existing folder
         if os.path.exists(dirname):
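In short, download_csv now expands a leading '~' in the destination path and derives its working directory by stripping the '.csv' suffix, instead of calling make_sure_path_is_directory. A minimal usage sketch follows; the BQ() constructor arguments and the query are hypothetical, only the download_csv signature and the path handling come from the diff:

    from utill.my_bq import BQ

    bq = BQ()  # constructor arguments are not shown in this diff

    # '~' is expanded, and intermediate files go into a folder derived from the
    # destination name ('~/exports/orders' for '~/exports/orders.csv').
    bq.download_csv('SELECT * FROM `project.dataset.orders`', '~/exports/orders.csv')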
rdxz2_utill-0.0.5/src/utill/my_compare.py (new file)

@@ -0,0 +1,34 @@
+import math
+
+
+def try_float(value) -> float:
+    try:
+        return float(value)
+    except:
+        return value
+
+
+def same(a, b, float_precision=None) -> tuple[bool, float]:
+    if a is None and b is None:
+        return True, None
+
+    if a is None or b is None:
+        return False, None
+
+    # Compare float
+    a_float = try_float(a)
+    b_float = try_float(b)
+    if isinstance(a_float, float) or isinstance(b_float, float):
+        try:
+            if math.isnan(a_float) and math.isnan(b_float):
+                return True, None
+
+            if float_precision:
+                a_float_rounded = round(a_float, float_precision)
+                b_float_rounded = round(b_float, float_precision)
+
+            return a_float_rounded == b_float_rounded, abs(a_float - b_float)
+        except (ValueError, TypeError):
+            raise Exception(f'Can\'t compare {a} to {b}')
+
+    return str(a) == str(b), None
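The new module compares two values: both-None and both-NaN count as equal, values that parse as floats are compared numerically, and everything else falls back to string equality. A small usage sketch (results inferred from the code above):

    from utill.my_compare import same, try_float

    try_float('3.14')   # -> 3.14 (parsed to float)
    try_float('abc')    # -> 'abc' (returned unchanged)

    same(None, None)                            # -> (True, None)
    same('a', 'b')                              # -> (False, None), string comparison
    same('1.0001', 1.0002, float_precision=3)   # -> (True, ~0.0001), rounded comparison

Note that the float branch references a_float_rounded even when float_precision is not given, so purely numeric comparisons appear to require passing float_precision.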
{rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_gcs.py

@@ -30,6 +30,8 @@ class GCS:
         return f'{self.base_path}/{path}' if self.base_path else path
 
     def change_bucket(self, bucket_name: str):
+        if not bucket_name:
+            raise ValueError('Bucket name needed')
         self.bucket = self.client.bucket(bucket_name)
         logger.debug(f'Change bucket to {self.bucket.name}')
 
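change_bucket now rejects an empty bucket name instead of silently creating a bucket handle for ''. A usage sketch; the GCS() constructor arguments are not shown in this diff and are assumed:

    from utill.my_gcs import GCS

    gcs = GCS()                      # constructor arguments assumed
    gcs.change_bucket('my-bucket')   # switches the active bucket as before
    gcs.change_bucket('')            # now raises ValueError('Bucket name needed')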
{rdxz2_utill-0.0.3 → rdxz2_utill-0.0.5}/src/utill/my_pg.py

@@ -99,59 +99,61 @@ class PG:
             for data in copy:
                 f.write(data)
 
-    def pg_to_pg(self, pg: "PG",
+    def pg_to_pg(self, pg: "PG", src_table_name: str, dst_table_name: str, cols: list[str] = None) -> None:
         tmp_filename = generate_random_string() + '.csv'
         cols_str = ','.join([f'"{x}"' for x in cols]) if (cols is not None and cols != []) else '*'
         try:
-            self.download_csv(f'SELECT {cols_str} FROM {
-            pg.upload_csv(tmp_filename,
+            self.download_csv(f'SELECT {cols_str} FROM {src_table_name}', tmp_filename)
+            pg.upload_csv(tmp_filename, dst_table_name)
         except:
             raise
         finally:
             os.remove(tmp_filename) if os.path.exists(tmp_filename) else None
 
     def check_table_existence(self, table_name: str) -> bool:
-        if not self.execute_query('''SELECT count(1) AS "cnt" FROM "information_schema"."tables" WHERE "table_schema" || '.' || "table_name" =
+        if not self.execute_query('''SELECT count(1) AS "cnt" FROM "information_schema"."tables" WHERE "table_schema" || '.' || "table_name" = %s;''', table_name).fetchone()[0]:
             raise Exception(f'Target table \'{table_name}\' not created, please create it first!')
 
-    def upload_tuples(self, cols: list[str],
-        self.check_table_existence(
+    def upload_tuples(self, cols: list[str], src_tuples: list[tuple], src_table_name: str) -> None:
+        self.check_table_existence(src_table_name)
 
         cols_str = ','.join([f'"{x}"' for x in cols])
-        query = f'''COPY {
+        query = f'''COPY {src_table_name}({cols_str}) FROM STDIN'''
         logger.debug(f'🔎 Query:\n{query}')
         with self.cursor.copy(query) as copy:
-            for row in
+            for row in src_tuples:
                 copy.write_row(row)
 
-    def upload_list_of_dict(self,
-        self.check_table_existence(
+    def upload_list_of_dict(self, src_data: list[dict], dst_table_name: str) -> None:
+        self.check_table_existence(dst_table_name)
 
-        if len(
+        if len(src_data) == 0:
             raise ValueError('No data to upload!')
 
-        cols =
+        cols = src_data[0].keys()
         cols_str = ','.join([f'"{x}"' for x in cols])
-        query = f'''COPY {
+        query = f'''COPY {dst_table_name}({cols_str}) FROM STDIN'''
         logger.debug(f'🔎 Query:\n{query}')
         with self.cursor.copy(query) as copy:
-            for row in
+            for row in src_data:
                 copy.write_row(tuple(row[col] for col in cols))
 
-    def upload_csv(self,
-
+    def upload_csv(self, src_filename: str, dst_table_name: str) -> None:
+        src_filename = os.path.expanduser(src_filename)
 
-
+        self.check_table_existence(dst_table_name)
+
+        cols_str = ','.join([f'"{x}"' for x in next(csv.reader(open(src_filename, 'r')))])
         query = dedent(
             f'''
-            COPY {
+            COPY {dst_table_name}({cols_str})
            FROM STDIN
            DELIMITER ','
            CSV HEADER;
            '''
         )
         logger.debug(f'🔎 Query:\n{query}')
-        with open(os.path.expanduser(
+        with open(os.path.expanduser(src_filename), 'r') as f:
            with self.cursor.copy(query) as copy:
                while data := f.read(1024):
                    copy.write(data)
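The my_pg.py changes complete the previously truncated method signatures: pg_to_pg, upload_tuples, upload_list_of_dict and upload_csv now take explicit src_*/dst_* parameters, and upload_csv expands '~', verifies the target table, and builds the column list from the CSV header. A usage sketch under an assumed connection setup; the PG() constructor arguments and the table/file names are hypothetical, only the method signatures come from the diff:

    from utill.my_pg import PG

    src = PG()   # connection arguments are not shown in this diff
    dst = PG()

    # Copy a table between two PostgreSQL connections via a temporary CSV file.
    src.pg_to_pg(dst, 'public.orders', 'public.orders_copy', cols=['id', 'amount'])

    # Load a local CSV into an existing table; '~' is expanded and the header
    # row determines the column list.
    dst.upload_csv('~/exports/orders.csv', 'public.orders_copy')

    # Upload in-memory rows; the target table must already exist.
    dst.upload_tuples(['id', 'amount'], [(1, 10.5), (2, 20.0)], 'public.orders_copy')
    dst.upload_list_of_dict([{'id': 3, 'amount': 30.0}], 'public.orders_copy')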
All other files listed above (+0 -0) are unchanged between 0.0.3 and 0.0.5.