rdxz2-utill 0.1.3__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries. Note: string literals on removed (-) lines were not captured during extraction, so some removed lines in the per-file diffs below appear truncated; added (+) lines are complete.
- {rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/METADATA +2 -1
- rdxz2_utill-0.1.4.dist-info/RECORD +37 -0
- utill/cmd/_bq.py +16 -3
- utill/cmd/_conf.py +15 -15
- utill/cmd/_enc.py +8 -4
- utill/cmd/_mb.py +116 -36
- utill/cmd/_pg.py +4 -2
- utill/cmd/utill.py +193 -72
- utill/my_bq.py +271 -158
- utill/my_compare.py +1 -1
- utill/my_const.py +11 -8
- utill/my_csv.py +31 -15
- utill/my_datetime.py +21 -10
- utill/my_encryption.py +31 -13
- utill/my_env.py +22 -13
- utill/my_file.py +15 -13
- utill/my_gcs.py +40 -16
- utill/my_gdrive.py +195 -0
- utill/my_input.py +8 -4
- utill/my_json.py +6 -6
- utill/my_mb.py +351 -357
- utill/my_pg.py +76 -46
- utill/my_queue.py +37 -24
- utill/my_string.py +23 -5
- utill/my_style.py +18 -16
- utill/my_tunnel.py +29 -9
- utill/my_xlsx.py +11 -8
- rdxz2_utill-0.1.3.dist-info/RECORD +0 -36
- {rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/WHEEL +0 -0
- {rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/entry_points.txt +0 -0
- {rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/licenses/LICENSE +0 -0
- {rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/top_level.txt +0 -0
utill/my_pg.py
CHANGED

@@ -17,27 +17,35 @@ class PG:
         connection=None,
         config_source: str | dict = PG_FILENAME,
         autocommit: bool = True,
-        application_name: str =
+        application_name: str = "utill",
         row_factory: psycopg.rows = psycopg.rows.tuple_row,
     ) -> None:
         # Evaluate config source
         if isinstance(config_source, str):
             if not os.path.exists(config_source):
-                raise ValueError(
+                raise ValueError(
+                    f"Config source file not found: {config_source}, create one with 'utill init'"
+                )
             if connection is None:
-                raise ValueError(
-
+                raise ValueError(
+                    "Connection name must be provided when using file source!"
+                )
+            conf = json.loads(open(os.path.expanduser(config_source)).read())[
+                connection
+            ]
         elif isinstance(config_source, dict):
             conf = config_source
         else:
-            raise ValueError(
+            raise ValueError(
+                "Config source type must be either one of string / dictonary"
+            )
 
         (_, host, port) = establish_tunnel(conf)
         self.db_host = host
         self.db_port = port
-        self.db_username = conf[
-        self.db_password = conf[
-        self.db_name = conf[
+        self.db_username = conf["username"]
+        self.db_password = conf["password"]
+        self.db_name = conf["db"]
         self.conf = conf
 
         self.conn = None
@@ -45,12 +53,12 @@ class PG:
         self.row_factory = row_factory
 
         conninfo = {
-
-
-
-
-
-
+            "host": self.db_host,
+            "port": self.db_port,
+            "user": self.db_username,
+            "password": self.db_password,
+            "dbname": self.db_name,
+            "application_name": application_name,
         }
         self.dsn = psycopg.conninfo.make_conninfo(**conninfo)
         self.establish_connection(autocommit, row_factory)
@@ -64,7 +72,9 @@ class PG:
     def establish_connection(self, autocommit: bool, row_factory: psycopg.rows):
         self.conn = psycopg.connect(self.dsn, autocommit=autocommit)
         self.cursor = self.conn.cursor(row_factory=row_factory)
-        logger.debug(
+        logger.debug(
+            f"PG client open: {self.db_username}@{self.db_host}:{self.db_port}/{self.db_name}, autocommit={self.conn.autocommit}"
+        )
 
     def change_autocommit(self, autocommit: bool):
         if autocommit == self.conn.autocommit:
@@ -78,33 +88,39 @@ class PG:
         self.establish_connection(self.conn.autocommit, self.row_factory)
 
         query = query.strip()
-        logger.debug(f
+        logger.debug(f"🔎 Query:\n{query}")
 
         return self.cursor.execute(query, params)
 
     def download_csv(self, query: str, file_path: str) -> None:
         query = dedent(
-            f
+            f"""
             COPY ({query})
             TO STDOUT
             WITH DELIMITER ','
             CSV HEADER;
-
+        """
         )
-        logger.debug(f
-        with open(os.path.expanduser(file_path),
+        logger.debug(f"🔎 Query:\n{query}")
+        with open(os.path.expanduser(file_path), "wb") as f:
             with self.cursor.copy(query) as copy:
                 for data in copy:
                     f.write(data)
 
-    def pg_to_pg(
+    def pg_to_pg(
+        self, pg: "PG", src_table_name: str, dst_table_name: str, cols: list[str] = None
+    ) -> None:
        self.ensure_table_exists(src_table_name)
        pg.ensure_table_exists(dst_table_name)
 
-        tmp_filename = generate_random_string(alphanum=True) +
-        cols_str =
+        tmp_filename = generate_random_string(alphanum=True) + ".csv"
+        cols_str = (
+            ",".join([f'"{x}"' for x in cols])
+            if (cols is not None and cols != [])
+            else "*"
+        )
         try:
-            self.download_csv(f
+            self.download_csv(f"SELECT {cols_str} FROM {src_table_name}", tmp_filename)
             pg.upload_csv(tmp_filename, dst_table_name)
         except:
             raise
@@ -112,15 +128,22 @@ class PG:
         os.remove(tmp_filename) if os.path.exists(tmp_filename) else None
 
     def ensure_table_exists(self, table_name: str) -> bool:
-        if not self.execute_query(
-
-
-
+        if not self.execute_query(
+            """SELECT count(1) AS "cnt" FROM "information_schema"."tables" WHERE "table_schema" || '.' || "table_name" = %s;""",
+            (table_name,),
+        ).fetchone()[0]:
+            raise Exception(
+                f"Target table '{table_name}' not created, please create it first!"
+            )
+
+    def upload_tuples(
+        self, cols: list[str], src_tuples: list[tuple], src_table_name: str
+    ) -> None:
         self.ensure_table_exists(src_table_name)
 
-        cols_str =
-        query = f
-        logger.debug(f
+        cols_str = ",".join([f'"{x}"' for x in cols])
+        query = f"""COPY {src_table_name}({cols_str}) FROM STDIN"""
+        logger.debug(f"🔎 Query:\n{query}")
         with self.cursor.copy(query) as copy:
             for row in src_tuples:
                 copy.write_row(row)
@@ -129,12 +152,12 @@ class PG:
         self.ensure_table_exists(dst_table_name)
 
         if len(src_data) == 0:
-            raise ValueError(
+            raise ValueError("No data to upload!")
 
         cols = src_data[0].keys()
-        cols_str =
-        query = f
-        logger.debug(f
+        cols_str = ",".join([f'"{x}"' for x in cols])
+        query = f"""COPY {dst_table_name}({cols_str}) FROM STDIN"""
+        logger.debug(f"🔎 Query:\n{query}")
         with self.cursor.copy(query) as copy:
             for row in src_data:
                 copy.write_row(tuple(row[col] for col in cols))
@@ -144,39 +167,46 @@ class PG:
 
         self.ensure_table_exists(dst_table_name)
 
-        cols_str =
+        cols_str = ",".join(
+            [f'"{x}"' for x in next(csv.reader(open(src_filename, "r")))]
+        )
         query = dedent(
-            f
+            f"""
             COPY {dst_table_name}({cols_str})
             FROM STDIN
             DELIMITER ','
             CSV HEADER;
-
+        """
         )
-        logger.debug(f
-        with open(os.path.expanduser(src_filename),
+        logger.debug(f"🔎 Query:\n{query}")
+        with open(os.path.expanduser(src_filename), "r") as f:
             with self.cursor.copy(query) as copy:
                 while data := f.read(1024):
                     copy.write(data)
 
-    def create_index(
+    def create_index(
+        self, table_name: str, index: str | list[str], unique: bool = False
+    ) -> None:
         try:
             index = index if type(index) == list else [index]
-            indexes =
-            self.execute_query(
+            indexes = ",".join([f'"{x}"' for x in index])
+            self.execute_query(
+                f'CREATE {"UNIQUE " if unique else ""}INDEX ON "{table_name}" ({indexes});',
+                return_df=False,
+            )
         except Exception as e:
             self.rollback()
             raise e
 
     def rollback(self):
         self.conn.rollback()
-        logger.debug(
+        logger.debug("🚫 Transaction rollback")
 
     def commit(self):
         self.conn.commit()
-        logger.debug(
+        logger.debug("✅ Transaction commit")
 
     def close(self):
         self.cursor.close()
         self.conn.close()
-        logger.debug(
+        logger.debug("PG client close")
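The constructor change above makes config handling explicit: a dict is used as-is, while a string path now requires both an existing file and a connection name. A minimal usage sketch against the new signature (the dict keys mirror the ones read in the diff — host/port for establish_tunnel plus username, password, db; all values, the query, and the file names are illustrative):

    from utill.my_pg import PG

    # Dict source: used directly, no config file read, no connection name needed.
    pg = PG(
        config_source={
            "host": "10.0.0.5",
            "port": 5432,
            "username": "analyst",
            "password": "s3cret",
            "db": "warehouse",
        },
        application_name="nightly-export",
    )
    pg.download_csv("SELECT * FROM public.orders", "orders.csv")
    pg.close()

    # String source: the file must exist and a connection name must be given,
    # otherwise the new ValueError messages above are raised.
    pg = PG(connection="reporting", config_source="pg.json")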
utill/my_queue.py
CHANGED

@@ -3,12 +3,19 @@ from typing import Callable
 import concurrent.futures
 import queue
 
+
 class StreamingQ:
-    def __init__(
+    def __init__(
+        self,
+        producer_func: Callable,
+        producer_args: tuple,
+        consumer_func: Callable,
+        max_queue_size: int = 0,
+    ):
         self.producer_func = producer_func
         self.producer_args = producer_args
         self.consumer_func = consumer_func
-
+
         # Use maxsize for backpressure control (0 = unlimited)
         self.q = queue.Queue(maxsize=max_queue_size)
 
@@ -17,68 +24,68 @@ class StreamingQ:
         Execute producer and consumer with true streaming using generators.
         Yields consumer results as they become available.
         """
+
         def producer():
             try:
                 for item in self.producer_func(*self.producer_args):
                     self.q.put(item)
-                    logger.debug(f
+                    logger.debug(f"🌾 Produced {item}")
             except Exception as e:
-                logger.error(f
-                self.q.put((
+                logger.error(f"Producer error: {e}")
+                self.q.put(("ERROR", e))
             finally:
                 # Signal end of production
                 self.q.put(None)
-                logger.debug(
+                logger.debug("🌾 Producer finished")
 
         def consumer():
             while True:
                 item = self.q.get()
-
+
                 if item is None:
                     # End of stream signal
                     self.q.task_done()
                     break
-
-                if isinstance(item, tuple) and item[0] ==
+
+                if isinstance(item, tuple) and item[0] == "ERROR":
                     # Propagate producer error
                     self.q.task_done()
                     raise item[1]
-
+
                 try:
                     # Unpack item if it's a tuple, otherwise pass as single arg
                     if isinstance(item, tuple):
                         result = self.consumer_func(*item)
                     else:
                         result = self.consumer_func(item)
-
+
                     self.q.task_done()
-                    logger.debug(f
+                    logger.debug(f"🔥 Consumed {item} -> {result}")
                     yield result
-
+
                 except Exception as e:
                     self.q.task_done()
-                    logger.error(f
+                    logger.error(f"Consumer error processing {item}: {e}")
                     raise
 
         with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
             # Start producer in background
             future_producer = executor.submit(producer)
-
+
             try:
                 # Yield results as they become available
                 for result in consumer():
                     yield result
-
+
                 # Wait for producer to complete
                 future_producer.result()
-
+
             except Exception as e:
                 # Cancel producer if consumer fails
                 future_producer.cancel()
                 raise
 
 
-
 class ThreadingQ:
     def __init__(self) -> None:
         self.q = queue.Queue()
@@ -98,8 +105,14 @@ class ThreadingQ:
         return self
 
     def execute(self):
-        if not all(
-
+        if not all(
+            [
+                self.producer_func is not None,
+                self.producer_args is not None,
+                self.consumer_func is not None,
+            ]
+        ):
+            raise Exception("Producer and Consumer functions must be defined!")
 
         def producer():
             results = []
@@ -107,7 +120,7 @@ class ThreadingQ:
             for item in self.producer_func(*self.producer_args):
                 self.q.put(item)
                 results.append(item)
-                logger.debug(f
+                logger.debug(f"🌾 Produced {item}")
 
             self.q.put(None)
             return results
@@ -124,7 +137,7 @@ class ThreadingQ:
                 results.append(result)
 
                 self.q.task_done()
-                logger.debug(f
+                logger.debug(f"🔥 Consumed {item}")
 
             return results
 
@@ -134,8 +147,8 @@ class ThreadingQ:
             self.future_consumer = executor.submit(consumer)
 
             producer_result = self.future_producer.result()
-            logger.debug(
+            logger.debug("✅ Producer done")
             consumer_result = self.future_consumer.result()
-            logger.debug(
+            logger.debug("✅ Consumer done")
 
             return producer_result, consumer_result
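StreamingQ's reworked __init__ now takes everything up front, and execute() is a generator, so results stream out while the producer thread is still filling the queue. A minimal sketch of the round trip (the producer and consumer functions are invented for illustration):

    from utill.my_queue import StreamingQ

    def produce(n):          # any generator works as the producer
        for i in range(n):
            yield i

    def consume(item):       # called once per queued item
        return item * item

    q = StreamingQ(produce, (5,), consume, max_queue_size=100)
    for result in q.execute():   # yields 0, 1, 4, 9, 16 as items are consumed
        print(result)

Note that tuple items are unpacked into consumer_func(*item), and a 2-tuple beginning with "ERROR" is reserved as the producer's failure sentinel.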
utill/my_string.py
CHANGED

@@ -4,17 +4,35 @@ import re
 import string
 
 
-def generate_random_string(length: int = 4, alphanum: bool = False):
+def generate_random_string(length: int = 4, alphanum: bool = False):
+    return "".join(
+        random.choice(
+            string.ascii_letters
+            + string.digits
+            + (r'!@#$%^&*()-=_+[]{};\':",./<>?' if not alphanum else "")
+        )
+        for _ in range(length)
+    )
 
 
-def replace_nonnumeric(string: str, replace: str) -> str:
+def replace_nonnumeric(string: str, replace: str) -> str:
+    return re.sub("[^0-9a-zA-Z]+", replace, string)
 
 
-def mask(
+def mask(
+    string: str,
+    mask_length_min: int = 5,
+    mask_length_max: int = 50,
+    display_length: int = 5,
+) -> str:
     if not string:
         mask_length = mask_length_min
     else:
         hash_value = int(hashlib.sha256(string.encode()).hexdigest(), 16)
-        mask_length = mask_length_min + (
+        mask_length = mask_length_min + (
+            hash_value % (mask_length_max - mask_length_min + 1)
+        )
 
-    return (
+    return ("*" * mask_length) + (
+        string[(-display_length if len(string) > display_length else -1) :]
+    )
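The reformatted helpers keep their behavior: generate_random_string draws from letters, digits, and (unless alphanum=True) punctuation; replace_nonnumeric actually collapses every non-alphanumeric run, despite its name; and mask derives the asterisk count from a SHA-256 hash, so a given input always masks to the same length while revealing its tail. A quick sketch of expected outputs (the random value shown is only an example):

    from utill.my_string import generate_random_string, mask, replace_nonnumeric

    generate_random_string(8, alphanum=True)     # e.g. 'a8Xk2Lq9'
    replace_nonnumeric("2024-01-02 10:30", "_")  # '2024_01_02_10_30'
    mask("super-secret-token")                   # 5-50 '*' chars, then 'token'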
utill/my_style.py
CHANGED

@@ -1,24 +1,26 @@
 class Styles:
-    NONE =
-    ITALIC =
-    BOLD =
-    UNDERLINE =
+    NONE = "\033[0m"
+    ITALIC = "\033[3m"
+    BOLD = "\033[1m"
+    UNDERLINE = "\033[4m"
 
 
 class Colors:
-    HEADER =
-    OKBLUE =
-    OKCYAN =
-    OKGREEN =
-    WARNING =
-    RED =
-    BOLD =
-    UNDERLINE =
-
-
-def make_style(
+    HEADER = "\033[95m"
+    OKBLUE = "\033[94m"
+    OKCYAN = "\033[96m"
+    OKGREEN = "\033[92m"
+    WARNING = "\033[93m"
+    RED = "\033[91m"
+    BOLD = "\033[1m"
+    UNDERLINE = "\033[4m"
+
+
+def make_style(
+    styles_or_colors: list[Styles | Colors] | Styles | Colors, string: str
+) -> str:
     if type(styles_or_colors) == list:
-        return
+        return "".join(styles_or_colors) + string + Styles.NONE
     else:
         return styles_or_colors + string + Styles.NONE
 
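With the ANSI escape values restored above, make_style simply concatenates the codes, the text, and the reset sequence; passing a list lets styles and colors stack. For example:

    from utill.my_style import Colors, Styles, make_style

    print(make_style(Colors.RED, "failed"))                 # red, then reset
    print(make_style([Styles.BOLD, Colors.OKGREEN], "ok"))  # bold green, then reset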
utill/my_tunnel.py
CHANGED

@@ -3,7 +3,7 @@ import socket
 from loguru import logger
 from sshtunnel import SSHTunnelForwarder
 
-LOCALHOST =
+LOCALHOST = "127.0.0.1"
 
 
 def _get_random_port() -> int:
@@ -12,7 +12,15 @@ def _get_random_port() -> int:
         return s.getsockname()[1]
 
 
-def start_tunnel(
+def start_tunnel(
+    host: str,
+    port: int,
+    user: str,
+    key: str,
+    target_host: str,
+    target_port: int,
+    local_port: int = None,
+) -> int:
     local_port = local_port or _get_random_port()
 
     tunnel = SSHTunnelForwarder(
@@ -29,14 +37,26 @@ def start_tunnel(host: str, port: int, user: str, key: str, target_host: str, ta
 
 
 def establish_tunnel(conf: dict, local_port: int = None) -> tuple:
-    using_tunnel = bool(conf.get(
-    local_host = LOCALHOST if using_tunnel else conf[
-
-    z =
-
-
+    using_tunnel = bool(conf.get("tunnel_host"))
+    local_host = LOCALHOST if using_tunnel else conf["host"]
+
+    z = (
+        start_tunnel(
+            conf["tunnel_host"],
+            conf["tunnel_port"],
+            conf["tunnel_username"],
+            conf["tunnel_key"],
+            conf["host"],
+            conf["port"],
+            local_port=local_port,
+        )
+        if using_tunnel
+        else (None, local_host, conf["port"])
+    )
 
     if using_tunnel:
-        logger.debug(
+        logger.debug(
+            f'🛣️ Tunnel established: {conf["host"]}:{conf["port"]} --> {conf["tunnel_username"]}@{conf["tunnel_host"]} --> {z[1]}:{z[2]}'
+        )
 
     return z
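establish_tunnel branches on the presence of a "tunnel_host" key: without it the conf's own host and port are passed straight through, with it an SSH forward is opened and, judging by the debug line above, the returned triple is indexed the same way. A sketch of both shapes (hostnames and key path are illustrative):

    from utill.my_tunnel import establish_tunnel

    # Direct: no "tunnel_host", so this returns (None, "db.internal", 5432).
    tunnel, host, port = establish_tunnel({"host": "db.internal", "port": 5432})

    # Tunneled: traffic is forwarded through the bastion; the returned
    # host/port presumably point at the local socket (127.0.0.1, random port).
    tunnel, host, port = establish_tunnel({
        "host": "db.internal",
        "port": 5432,
        "tunnel_host": "bastion.example.com",
        "tunnel_port": 22,
        "tunnel_username": "ops",
        "tunnel_key": "~/.ssh/id_ed25519",
    })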
utill/my_xlsx.py
CHANGED

@@ -6,16 +6,19 @@ from loguru import logger
 
 def xlsx_to_csv(filename: str, sheet: str):
     con = duckdb.connect()
-    return
-    .execute(
-    .execute(
+    return (
+        con.execute("install spatial;")
+        .execute("load spatial;")
+        .execute(f"select * from st_read('{filename}', layer='{sheet}');")
         .fetchall()
+    )
 
 
 def csv_to_xlsx(filename: str, output_file_path: str):
-    logger.info(f
+    logger.info(f"Converting csv '{filename}' into xlsx '{output_file_path}' ...")
     con = duckdb.connect()
-    con.execute(
-    .
-
-
+    con.execute("install spatial;").execute("load spatial;").execute(
+        f"set threads to {multiprocessing.cpu_count()};"
+    ).execute(
+        f"copy '{filename}' to '{output_file_path}' with(format gdal, driver 'xlsx')"
+    )
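Both helpers route the conversion through DuckDB's spatial extension (st_read for reading, the GDAL xlsx driver for writing), installed and loaded at call time. Usage is a single call each; the filenames here are illustrative:

    from utill.my_xlsx import csv_to_xlsx, xlsx_to_csv

    rows = xlsx_to_csv("report.xlsx", sheet="Sheet1")  # fetchall(): list of row tuples
    csv_to_xlsx("report.csv", "report_copy.xlsx")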
rdxz2_utill-0.1.3.dist-info/RECORD
DELETED

@@ -1,36 +0,0 @@
-rdxz2_utill-0.1.3.dist-info/licenses/LICENSE,sha256=PF9CUvzP8XFYopEAzrMzSCovF7RdBdscPqJCDC6KjPc,1073
-utill/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-utill/my_bq.py,sha256=U3YlLTEQfDSsIbDi2Cjw8fv51uwjavNscLo3dZ-R4fU,25452
-utill/my_compare.py,sha256=619QbVk3GihWxen95yVnivKHkah8GgPTLGiSkgHxykw,886
-utill/my_const.py,sha256=88dOqn6NPQ5-hfRqdkew5POoAIyO91XXOGvN76oNsdo,251
-utill/my_csv.py,sha256=AT5sAbAlYqnAmNgQMTSqEueRXM4D42yNPb5C3Hedy6c,2921
-utill/my_datetime.py,sha256=8AUO9l_MSzdthRsgASuyGZpvjgpoQb9Lowt4goHjyqw,2129
-utill/my_dict.py,sha256=jPaPfdn4WYpm0uIBPiYFinpHhx1jXpFVDJ9npmvxGZQ,391
-utill/my_encryption.py,sha256=SCF7PPur39cW4RHidsRhw-9BZP-ymUH-6LZ9nAHJDsY,2105
-utill/my_env.py,sha256=E7XW3fuhxbDlFqmLPHrziJJZVRogzGh6rfQdyNV49f8,2130
-utill/my_file.py,sha256=-b6_dGDDBdS228kgwTYpmIa3vxW1c1TtWrLdzdlHjKY,1873
-utill/my_gcs.py,sha256=qFH47gQmk6_v0DajihymX7Xx1ZVp4xWuxDQh90PrY0g,3088
-utill/my_input.py,sha256=OyKLoutXpwISReltuL_Gw2oojv16tYWJqQpqabBOQx4,350
-utill/my_json.py,sha256=WgW6mavGhfs4h1N5XbhsDnRk2dbh_ttJWdJUj4iWDN4,1473
-utill/my_mb.py,sha256=IyrySs92TqtjBUvPMeUN3P2kRK8EttTFRPZsv5Cr-xw,15090
-utill/my_pg.py,sha256=J9USygc-oug4w7AkBacA9x043jHZrDfQPGFEqXavZAY,6799
-utill/my_queue.py,sha256=Qf3Nm_ZRoVD34oAoym8A9hoH9Y27kUHeWLhylAUj5Q4,4749
-utill/my_string.py,sha256=pINYFR1ligTyVZYzV8P_FolCsZQwYE1jaFNTuQ3XS_8,833
-utill/my_style.py,sha256=Wy6j4WL9RgGeX6cS9hhlOrufc9UC4UPTQ5UJa0ZJ3Yo,900
-utill/my_tunnel.py,sha256=uCpGtiG8AcRYiaN7rLnTulsZI4iFTRM8EHxwyAAfDrE,1292
-utill/my_xlsx.py,sha256=YcQRp6DC9girSS1fkUPVKsHspyQpr8JC8GymSSnRV-w,729
-utill/cmd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-utill/cmd/_bq.py,sha256=MQGLIv_WBUBl2tf18bfYrAszx0Koa5kdTW1c8A5HDDg,520
-utill/cmd/_conf.py,sha256=DKl3IVVLp6-5P43tvh6bYmHR5rOL9XnKVuQ7kQJtzrc,1863
-utill/cmd/_enc.py,sha256=DBy3Iwa5DTtww7lgHPRLEilrYPrWDG1vRv5PO-YzNO8,997
-utill/cmd/_main.py,sha256=UJ_XTIGDO9XPIypgHhS81SJQ_8qy8JOyw98Or0Nb2x8,273
-utill/cmd/_mb.py,sha256=dtYk9HriSaYsn-6ZFIJhs8OPHUqI5fLSZosEHNuwykU,1823
-utill/cmd/_pg.py,sha256=RVxEiSifyIwMDYDM69vt6WSLdVDr1cMzY6r4T2PzNRA,492
-utill/cmd/utill.py,sha256=ZopLhpPo3Qn8B_Un64qVwuB1oGaAacgN6DVPCTK5V_o,4849
-utill/templates/mb.json,sha256=M46ZHSaSh4rbD_KGUViGr2B2ZV8_PC-O5Evqi35JK5g,59
-utill/templates/pg.json,sha256=LkJt0VV3zcyt7Tpn6gulsoVQgUc-9uImXOStvzu8cdU,271
-rdxz2_utill-0.1.3.dist-info/METADATA,sha256=mJqMSx_wABv4itLJfc6USR1q1Oyd-u91qfyXpFTmtMM,4401
-rdxz2_utill-0.1.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-rdxz2_utill-0.1.3.dist-info/entry_points.txt,sha256=9n5NWz5Wi9jDvYhB_81_4icgT5xABZ-QivHD8ibcafg,47
-rdxz2_utill-0.1.3.dist-info/top_level.txt,sha256=tuAYZoCsr02JYbpZj7I6fl1IIo53v3GG0uoj-_fINVk,6
-rdxz2_utill-0.1.3.dist-info/RECORD,,
{rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/WHEEL
File without changes

{rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/entry_points.txt
File without changes

{rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/licenses/LICENSE
File without changes

{rdxz2_utill-0.1.3.dist-info → rdxz2_utill-0.1.4.dist-info}/top_level.txt
File without changes