rdxz2-utill 0.0.3__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
utill/my_pg.py CHANGED
@@ -1,12 +1,12 @@
 import csv
 import json
 import os
+from textwrap import dedent
+
 import psycopg
 import psycopg.conninfo
 import psycopg.rows
-
 from loguru import logger
-from textwrap import dedent
 
 from .my_env import PG_FILENAME
 from .my_string import generate_random_string
@@ -19,27 +19,35 @@ class PG:
         connection=None,
         config_source: str | dict = PG_FILENAME,
         autocommit: bool = True,
-        application_name: str = 'utill',
+        application_name: str = "utill",
         row_factory: psycopg.rows = psycopg.rows.tuple_row,
     ) -> None:
         # Evaluate config source
         if isinstance(config_source, str):
             if not os.path.exists(config_source):
-                raise ValueError(f'Config source file not found: {config_source}, create one with \'utill init\'')
+                raise ValueError(
+                    f"Config source file not found: {config_source}, create one with 'utill init'"
+                )
             if connection is None:
-                raise ValueError('Connection name must be provided when using file source!')
-            conf = json.loads(open(os.path.expanduser(config_source)).read())[connection]
+                raise ValueError(
+                    "Connection name must be provided when using file source!"
+                )
+            conf = json.loads(open(os.path.expanduser(config_source)).read())[
+                connection
+            ]
         elif isinstance(config_source, dict):
             conf = config_source
         else:
-            raise ValueError('Config source type must be either one of string / dictonary')
+            raise ValueError(
+                "Config source type must be either one of string / dictonary"
+            )
 
         (_, host, port) = establish_tunnel(conf)
         self.db_host = host
         self.db_port = port
-        self.db_username = conf['username']
-        self.db_password = conf['password']
-        self.db_name = conf['db']
+        self.db_username = conf["username"]
+        self.db_password = conf["password"]
+        self.db_name = conf["db"]
         self.conf = conf
 
         self.conn = None
@@ -47,12 +55,12 @@ class PG:
         self.row_factory = row_factory
 
         conninfo = {
-            'host': self.db_host,
-            'port': self.db_port,
-            'user': self.db_username,
-            'password': self.db_password,
-            'dbname': self.db_name,
-            'application_name': application_name,
+            "host": self.db_host,
+            "port": self.db_port,
+            "user": self.db_username,
+            "password": self.db_password,
+            "dbname": self.db_name,
+            "application_name": application_name,
         }
         self.dsn = psycopg.conninfo.make_conninfo(**conninfo)
         self.establish_connection(autocommit, row_factory)
@@ -66,7 +74,9 @@ class PG:
     def establish_connection(self, autocommit: bool, row_factory: psycopg.rows):
         self.conn = psycopg.connect(self.dsn, autocommit=autocommit)
         self.cursor = self.conn.cursor(row_factory=row_factory)
-        logger.debug(f'PG client open: {self.db_username}@{self.db_host}:{self.db_port}/{self.db_name}, autocommit={self.conn.autocommit}')
+        logger.debug(
+            f"PG client open: {self.db_username}@{self.db_host}:{self.db_port}/{self.db_name}, autocommit={self.conn.autocommit}"
+        )
 
     def change_autocommit(self, autocommit: bool):
         if autocommit == self.conn.autocommit:
@@ -74,106 +84,131 @@ class PG:
 
         self.conn.autocommit = autocommit
 
-    def execute_query(self, query: str, *params):
+    def execute_query(self, query: str, params: tuple = None):
         # Make sure connection alive
         if self.conn.closed:
             self.establish_connection(self.conn.autocommit, self.row_factory)
 
         query = query.strip()
-        logger.debug(f'🔎 Query:\n{query}')
+        logger.debug(f"🔎 Query:\n{query}")
 
         return self.cursor.execute(query, params)
 
     def download_csv(self, query: str, file_path: str) -> None:
         query = dedent(
-            f'''
+            f"""
             COPY ({query})
             TO STDOUT
             WITH DELIMITER ','
            CSV HEADER;
-            '''
+            """
         )
-        logger.debug(f'🔎 Query:\n{query}')
-        with open(os.path.expanduser(file_path), 'wb') as f:
+        logger.debug(f"🔎 Query:\n{query}")
+        with open(os.path.expanduser(file_path), "wb") as f:
             with self.cursor.copy(query) as copy:
                 for data in copy:
                     f.write(data)
 
-    def pg_to_pg(self, pg: "PG", source_table: str, target_table: str, cols: list[str] = None) -> None:
-        tmp_filename = generate_random_string() + '.csv'
-        cols_str = ','.join([f'"{x}"' for x in cols]) if (cols is not None and cols != []) else '*'
+    def pg_to_pg(
+        self, pg: "PG", src_table_name: str, dst_table_name: str, cols: list[str] = None
+    ) -> None:
+        self.ensure_table_exists(src_table_name)
+        pg.ensure_table_exists(dst_table_name)
+
+        tmp_filename = generate_random_string(alphanum=True) + ".csv"
+        cols_str = (
+            ",".join([f'"{x}"' for x in cols])
+            if (cols is not None and cols != [])
+            else "*"
+        )
         try:
-            self.download_csv(f'SELECT {cols_str} FROM {source_table}', tmp_filename)
-            pg.upload_csv(tmp_filename, target_table)
+            self.download_csv(f"SELECT {cols_str} FROM {src_table_name}", tmp_filename)
+            pg.upload_csv(tmp_filename, dst_table_name)
         except:
             raise
         finally:
             os.remove(tmp_filename) if os.path.exists(tmp_filename) else None
 
-    def check_table_existence(self, table_name: str) -> bool:
-        if not self.execute_query('''SELECT count(1) AS "cnt" FROM "information_schema"."tables" WHERE "table_schema" || '.' || "table_name" = '%s';''', table_name).fetchone()['cnt']:
-            raise Exception(f'Target table \'{table_name}\' not created, please create it first!')
-
-    def upload_tuples(self, cols: list[str], tuples: list[tuple], table_name: str) -> None:
-        self.check_table_existence(table_name)
+    def ensure_table_exists(self, table_name: str) -> bool:
+        if not self.execute_query(
+            """SELECT count(1) AS "cnt" FROM "information_schema"."tables" WHERE "table_schema" || '.' || "table_name" = %s;""",
+            (table_name,),
+        ).fetchone()[0]:
+            raise Exception(
+                f"Target table '{table_name}' not created, please create it first!"
+            )
+
+    def upload_tuples(
+        self, cols: list[str], src_tuples: list[tuple], src_table_name: str
+    ) -> None:
+        self.ensure_table_exists(src_table_name)
 
-        cols_str = ','.join([f'"{x}"' for x in cols])
-        query = f'''COPY {table_name}({cols_str}) FROM STDIN'''
-        logger.debug(f'🔎 Query:\n{query}')
+        cols_str = ",".join([f'"{x}"' for x in cols])
+        query = f"""COPY {src_table_name}({cols_str}) FROM STDIN"""
+        logger.debug(f"🔎 Query:\n{query}")
         with self.cursor.copy(query) as copy:
-            for row in tuples:
+            for row in src_tuples:
                 copy.write_row(row)
 
-    def upload_list_of_dict(self, data: list[dict], table_name: str) -> None:
-        self.check_table_existence(table_name)
+    def upload_list_of_dict(self, src_data: list[dict], dst_table_name: str) -> None:
+        self.ensure_table_exists(dst_table_name)
 
-        if len(data) == 0:
-            raise ValueError('No data to upload!')
+        if len(src_data) == 0:
+            raise ValueError("No data to upload!")
 
-        cols = data[0].keys()
-        cols_str = ','.join([f'"{x}"' for x in cols])
-        query = f'''COPY {table_name}({cols_str}) FROM STDIN'''
-        logger.debug(f'🔎 Query:\n{query}')
+        cols = src_data[0].keys()
+        cols_str = ",".join([f'"{x}"' for x in cols])
+        query = f"""COPY {dst_table_name}({cols_str}) FROM STDIN"""
+        logger.debug(f"🔎 Query:\n{query}")
         with self.cursor.copy(query) as copy:
-            for row in data:
+            for row in src_data:
                 copy.write_row(tuple(row[col] for col in cols))
 
-    def upload_csv(self, file_path: str, table_name: str) -> None:
-        self.check_table_existence(table_name)
+    def upload_csv(self, src_filename: str, dst_table_name: str) -> None:
+        src_filename = os.path.expanduser(src_filename)
 
-        cols_str = ','.join([f'"{x}"' for x in next(csv.reader(open(file_path, 'r')))])
+        self.ensure_table_exists(dst_table_name)
+
+        cols_str = ",".join(
+            [f'"{x}"' for x in next(csv.reader(open(src_filename, "r")))]
+        )
         query = dedent(
-            f'''
-            COPY {table_name}({cols_str})
+            f"""
+            COPY {dst_table_name}({cols_str})
             FROM STDIN
             DELIMITER ','
             CSV HEADER;
-            '''
+            """
         )
-        logger.debug(f'🔎 Query:\n{query}')
-        with open(os.path.expanduser(file_path), 'r') as f:
+        logger.debug(f"🔎 Query:\n{query}")
+        with open(os.path.expanduser(src_filename), "r") as f:
             with self.cursor.copy(query) as copy:
                 while data := f.read(1024):
                     copy.write(data)
 
-    def create_index(self, table_name: str, index: str | list[str], unique: bool = False) -> None:
+    def create_index(
+        self, table_name: str, index: str | list[str], unique: bool = False
+    ) -> None:
         try:
             index = index if type(index) == list else [index]
-            indexes = ','.join([f'"{x}"' for x in index])
-            self.execute_query(f'CREATE {"UNIQUE " if unique else ""}INDEX ON "{table_name}" ({indexes});', return_df=False)
+            indexes = ",".join([f'"{x}"' for x in index])
+            self.execute_query(
+                f'CREATE {"UNIQUE " if unique else ""}INDEX ON "{table_name}" ({indexes});',
+                return_df=False,
+            )
         except Exception as e:
             self.rollback()
             raise e
 
     def rollback(self):
         self.conn.rollback()
-        logger.debug('🚫 Transaction rollback')
+        logger.debug("🚫 Transaction rollback")
 
     def commit(self):
         self.conn.commit()
-        logger.debug('✅ Transaction commit')
+        logger.debug("✅ Transaction commit")
 
     def close(self):
         self.cursor.close()
         self.conn.close()
-        logger.debug('PG client close')
+        logger.debug("PG client close")
utill/my_queue.py CHANGED
@@ -1,9 +1,92 @@
-import queue
 import concurrent.futures
+import queue
+from typing import Callable
 
 from loguru import logger
 
 
+class StreamingQ:
+    def __init__(
+        self,
+        producer_func: Callable,
+        producer_args: tuple,
+        consumer_func: Callable,
+        max_queue_size: int = 0,
+    ):
+        self.producer_func = producer_func
+        self.producer_args = producer_args
+        self.consumer_func = consumer_func
+
+        # Use maxsize for backpressure control (0 = unlimited)
+        self.q = queue.Queue(maxsize=max_queue_size)
+
+    def execute(self):
+        """
+        Execute producer and consumer with true streaming using generators.
+        Yields consumer results as they become available.
+        """
+
+        def producer():
+            try:
+                for item in self.producer_func(*self.producer_args):
+                    self.q.put(item)
+                    logger.debug(f"🌾 Produced {item}")
+            except Exception as e:
+                logger.error(f"Producer error: {e}")
+                self.q.put(("ERROR", e))
+            finally:
+                # Signal end of production
+                self.q.put(None)
+                logger.debug("🌾 Producer finished")
+
+        def consumer():
+            while True:
+                item = self.q.get()
+
+                if item is None:
+                    # End of stream signal
+                    self.q.task_done()
+                    break
+
+                if isinstance(item, tuple) and item[0] == "ERROR":
+                    # Propagate producer error
+                    self.q.task_done()
+                    raise item[1]
+
+                try:
+                    # Unpack item if it's a tuple, otherwise pass as single arg
+                    if isinstance(item, tuple):
+                        result = self.consumer_func(*item)
+                    else:
+                        result = self.consumer_func(item)
+
+                    self.q.task_done()
+                    logger.debug(f"🔥 Consumed {item} -> {result}")
+                    yield result
+
+                except Exception as e:
+                    self.q.task_done()
+                    logger.error(f"Consumer error processing {item}: {e}")
+                    raise
+
+        with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
+            # Start producer in background
+            future_producer = executor.submit(producer)
+
+            try:
+                # Yield results as they become available
+                for result in consumer():
+                    yield result
+
+                # Wait for producer to complete
+                future_producer.result()
+
+            except Exception:
+                # Cancel producer if consumer fails
+                future_producer.cancel()
+                raise
+
+
 class ThreadingQ:
     def __init__(self) -> None:
         self.q = queue.Queue()
@@ -23,8 +106,14 @@ class ThreadingQ:
         return self
 
     def execute(self):
-        if not all([self.producer_func is not None, self.producer_args is not None, self.consumer_func is not None]):
-            raise Exception('Producer and Consumer functions must be defined!')
+        if not all(
+            [
+                self.producer_func is not None,
+                self.producer_args is not None,
+                self.consumer_func is not None,
+            ]
+        ):
+            raise Exception("Producer and Consumer functions must be defined!")
 
         def producer():
             results = []
@@ -32,7 +121,7 @@ class ThreadingQ:
             for item in self.producer_func(*self.producer_args):
                 self.q.put(item)
                 results.append(item)
-                logger.debug(f'🌾 Produced {item}')
+                logger.debug(f"🌾 Produced {item}")
 
             self.q.put(None)
             return results
@@ -49,7 +138,7 @@ class ThreadingQ:
                 results.append(result)
 
                 self.q.task_done()
-                logger.debug(f'🔥 Consumed {item}')
+                logger.debug(f"🔥 Consumed {item}")
 
             return results
 
@@ -59,8 +148,8 @@ class ThreadingQ:
             self.future_consumer = executor.submit(consumer)
 
             producer_result = self.future_producer.result()
-            logger.debug('✅ Producer done')
+            logger.debug("✅ Producer done")
             consumer_result = self.future_consumer.result()
-            logger.debug('✅ Consumer done')
+            logger.debug("✅ Consumer done")
 
             return producer_result, consumer_result
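my_queue.py gains StreamingQ alongside the existing ThreadingQ: instead of collecting all producer and consumer results into lists, its execute() is a generator that yields each consumer result as soon as it is ready, with an optional bounded queue for backpressure. A minimal usage sketch; the producer and consumer functions here are illustrative, not part of the package:

from utill.my_queue import StreamingQ


def read_items(n):
    # Producer: any generator works; each yielded item is queued.
    for i in range(n):
        yield i


def process(item):
    # Consumer: called once per queued item (tuple items would be unpacked as *args).
    return item * item


q = StreamingQ(
    producer_func=read_items,
    producer_args=(5,),
    consumer_func=process,
    max_queue_size=2,  # 0 (the default) means an unbounded queue
)

for result in q.execute():
    print(result)  # 0, 1, 4, 9, 16, yielded as items are consumed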
utill/my_string.py CHANGED
@@ -4,17 +4,35 @@ import re
 import string
 
 
-def generate_random_string(length: int = 4, alphanum: bool = False): return ''.join(random.choice(string.ascii_letters + string.digits + (r'!@#$%^&*()-=_+[]{};\':",./<>?' if not alphanum else '')) for _ in range(length))
+def generate_random_string(length: int = 4, alphanum: bool = False):
+    return "".join(
+        random.choice(
+            string.ascii_letters
+            + string.digits
+            + (r'!@#$%^&*()-=_+[]{};\':",./<>?' if not alphanum else "")
+        )
+        for _ in range(length)
+    )
 
 
-def replace_nonnumeric(string: str, replace: str) -> str: return re.sub('[^0-9a-zA-Z]+', replace, string)
+def replace_nonnumeric(string: str, replace: str) -> str:
+    return re.sub("[^0-9a-zA-Z]+", replace, string)
 
 
-def mask(string: str, mask_length_min: int = 5, mask_length_max: int = 50, display_length: int = 5) -> str:
+def mask(
+    string: str,
+    mask_length_min: int = 5,
+    mask_length_max: int = 50,
+    display_length: int = 5,
+) -> str:
     if not string:
         mask_length = mask_length_min
     else:
         hash_value = int(hashlib.sha256(string.encode()).hexdigest(), 16)
-        mask_length = mask_length_min + (hash_value % (mask_length_max - mask_length_min + 1))
+        mask_length = mask_length_min + (
+            hash_value % (mask_length_max - mask_length_min + 1)
+        )
 
-    return ('*' * mask_length) + (string[(-display_length if len(string) > display_length else -1):])
+    return ("*" * mask_length) + (
+        string[(-display_length if len(string) > display_length else -1) :]
+    )
utill/my_style.py CHANGED
@@ -1,24 +1,26 @@
 class Styles:
-    NONE = '\033[0m'
-    ITALIC = '\033[3m'
-    BOLD = '\033[1m'
-    UNDERLINE = '\033[4m'
+    NONE = "\033[0m"
+    ITALIC = "\033[3m"
+    BOLD = "\033[1m"
+    UNDERLINE = "\033[4m"
 
 
 class Colors:
-    HEADER = '\033[95m'
-    OKBLUE = '\033[94m'
-    OKCYAN = '\033[96m'
-    OKGREEN = '\033[92m'
-    WARNING = '\033[93m'
-    RED = '\033[91m'
-    BOLD = '\033[1m'
-    UNDERLINE = '\033[4m'
-
-
-def make_style(styles_or_colors: list[Styles | Colors] | Styles | Colors, string: str) -> str:
+    HEADER = "\033[95m"
+    OKBLUE = "\033[94m"
+    OKCYAN = "\033[96m"
+    OKGREEN = "\033[92m"
+    WARNING = "\033[93m"
+    RED = "\033[91m"
+    BOLD = "\033[1m"
+    UNDERLINE = "\033[4m"
+
+
+def make_style(
+    styles_or_colors: list[Styles | Colors] | Styles | Colors, string: str
+) -> str:
     if type(styles_or_colors) == list:
-        return ''.join(styles_or_colors) + string + Styles.NONE
+        return "".join(styles_or_colors) + string + Styles.NONE
     else:
         return styles_or_colors + string + Styles.NONE
 
utill/my_tunnel.py CHANGED
@@ -3,7 +3,8 @@ import socket
 from loguru import logger
 from sshtunnel import SSHTunnelForwarder
 
-LOCALHOST = '127.0.0.1'
+
+LOCALHOST = "127.0.0.1"
 
 
 def _get_random_port() -> int:
@@ -12,7 +13,15 @@ def _get_random_port() -> int:
         return s.getsockname()[1]
 
 
-def start_tunnel(host: str, port: int, user: str, key: str, target_host: str, target_port: int, local_port: int = None) -> int:
+def start_tunnel(
+    host: str,
+    port: int,
+    user: str,
+    key: str,
+    target_host: str,
+    target_port: int,
+    local_port: int = None,
+) -> int:
     local_port = local_port or _get_random_port()
 
     tunnel = SSHTunnelForwarder(
@@ -29,14 +38,26 @@ def start_tunnel(host: str, port: int, user: str, key: str, target_host: str, ta
 
 
 def establish_tunnel(conf: dict, local_port: int = None) -> tuple:
-    using_tunnel = bool(conf.get('tunnel_host'))
-    local_host = LOCALHOST if using_tunnel else conf['host']
-
-    z = start_tunnel(conf['tunnel_host'], conf['tunnel_port'], conf['tunnel_username'], conf['tunnel_key'], conf['host'], conf['port'], local_port=local_port)\
-        if using_tunnel\
-        else (None, local_host, conf['port'])
+    using_tunnel = bool(conf.get("tunnel_host"))
+    local_host = LOCALHOST if using_tunnel else conf["host"]
+
+    z = (
+        start_tunnel(
+            conf["tunnel_host"],
+            conf["tunnel_port"],
+            conf["tunnel_username"],
+            conf["tunnel_key"],
+            conf["host"],
+            conf["port"],
+            local_port=local_port,
+        )
+        if using_tunnel
+        else (None, local_host, conf["port"])
+    )
 
     if using_tunnel:
-        logger.debug(f'🛣️ Tunnel established: {conf["host"]}:{conf["port"]} --> {conf["tunnel_username"]}@{conf["tunnel_host"]} --> {z[1]}:{z[2]}')
+        logger.debug(
+            f'🛣️ Tunnel established: {conf["host"]}:{conf["port"]} --> {conf["tunnel_username"]}@{conf["tunnel_host"]} --> {z[1]}:{z[2]}'
+        )
 
     return z
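establish_tunnel drives both direct and SSH-tunnelled connections from the same dict; the keys it reads are visible above. A sketch of the two shapes it accepts (values are placeholders and the exact value types are an assumption):

# Direct connection: no "tunnel_host", so (None, conf["host"], conf["port"]) is returned.
direct_conf = {
    "host": "db.internal",
    "port": 5432,
}

# SSH-tunnelled connection: the tunnel_* keys trigger start_tunnel, and the local
# bind host/port end up in the second and third elements of the returned tuple.
tunnelled_conf = {
    "host": "db.internal",
    "port": 5432,
    "tunnel_host": "bastion.example.com",
    "tunnel_port": 22,
    "tunnel_username": "deploy",
    "tunnel_key": "~/.ssh/id_ed25519",
}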
utill/my_xlsx.py CHANGED
@@ -1,21 +1,24 @@
-import duckdb
 import multiprocessing
 
+import duckdb
 from loguru import logger
 
 
 def xlsx_to_csv(filename: str, sheet: str):
     con = duckdb.connect()
-    return con.execute('install spatial;')\
-        .execute('load spatial;')\
-        .execute(f'select * from st_read(\'{filename}\', layer=\'{sheet}\');')\
+    return (
+        con.execute("install spatial;")
+        .execute("load spatial;")
+        .execute(f"select * from st_read('{filename}', layer='{sheet}');")
         .fetchall()
+    )
 
 
 def csv_to_xlsx(filename: str, output_file_path: str):
-    logger.info(f'Converting csv \'{filename}\' into xlsx \'{output_file_path}\' ...')
+    logger.info(f"Converting csv '{filename}' into xlsx '{output_file_path}' ...")
     con = duckdb.connect()
-    con.execute('install spatial;')\
-        .execute('load spatial;')\
-        .execute(f'set threads to {multiprocessing.cpu_count()};')\
-        .execute(f'copy \'{filename}\' to \'{output_file_path}\' with(format gdal, driver \'xlsx\')')
+    con.execute("install spatial;").execute("load spatial;").execute(
+        f"set threads to {multiprocessing.cpu_count()};"
+    ).execute(
+        f"copy '{filename}' to '{output_file_path}' with(format gdal, driver 'xlsx')"
+    )
utill/templates/mb.json CHANGED
@@ -1,4 +1,5 @@
 {
     "base_url": "xxx",
-    "api_key": "xxx"
+    "api_key": "xxx",
+    "end": true
 }
utill/templates/pg.json CHANGED
@@ -9,5 +9,6 @@
         "db": "xxx",
         "username": "xxx",
         "password": "xxx"
-    }
+    },
+    "end": true
 }
rdxz2_utill-0.0.3.dist-info/RECORD DELETED
@@ -1,34 +0,0 @@
-rdxz2_utill-0.0.3.dist-info/licenses/LICENSE,sha256=PF9CUvzP8XFYopEAzrMzSCovF7RdBdscPqJCDC6KjPc,1073
-utill/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-utill/my_bq.py,sha256=ZsjRbrbczGQ70CavXfyeAPaWT1OW5LyRFBxyusNZ0cc,14701
-utill/my_const.py,sha256=88dOqn6NPQ5-hfRqdkew5POoAIyO91XXOGvN76oNsdo,251
-utill/my_csv.py,sha256=76Q7IM7T_WYF5SybObN_MbkPqycirX4FnLd0DX3Kdyg,2742
-utill/my_datetime.py,sha256=KEZTplLk3tgVqqC3wClXFcsF_zo40fma_rtPg4kSJHc,2125
-utill/my_dict.py,sha256=jPaPfdn4WYpm0uIBPiYFinpHhx1jXpFVDJ9npmvxGZQ,391
-utill/my_encryption.py,sha256=SCF7PPur39cW4RHidsRhw-9BZP-ymUH-6LZ9nAHJDsY,2105
-utill/my_env.py,sha256=mREys72Ybg2p9p2s7ApOt0s_6F5-qxR8FyYEcSJ8pmU,2093
-utill/my_file.py,sha256=H3QmIOwubQCUMoOuk7jwf6AnqsljWZIuM7OjelyZby4,1865
-utill/my_gcs.py,sha256=VY2CXQbzBUhX-HunvAZ_y4E19eiuZ1b3TF33bUkJfp4,3953
-utill/my_input.py,sha256=OyKLoutXpwISReltuL_Gw2oojv16tYWJqQpqabBOQx4,350
-utill/my_json.py,sha256=WgW6mavGhfs4h1N5XbhsDnRk2dbh_ttJWdJUj4iWDN4,1473
-utill/my_mb.py,sha256=3_A5kXHgnkxGbd38vK5t5MfFcj84lohjS7C2OtlSo30,14841
-utill/my_pg.py,sha256=udsqNok7dOFz1rO-hQhqnEj8PpH9oMdkyyjGAdLRS-w,6554
-utill/my_queue.py,sha256=hINP4_yjmboSjHgo1J3CtPm2X9SE3HfczyED3ip7nfk,1930
-utill/my_string.py,sha256=pINYFR1ligTyVZYzV8P_FolCsZQwYE1jaFNTuQ3XS_8,833
-utill/my_style.py,sha256=Wy6j4WL9RgGeX6cS9hhlOrufc9UC4UPTQ5UJa0ZJ3Yo,900
-utill/my_tunnel.py,sha256=uCpGtiG8AcRYiaN7rLnTulsZI4iFTRM8EHxwyAAfDrE,1292
-utill/my_xlsx.py,sha256=YcQRp6DC9girSS1fkUPVKsHspyQpr8JC8GymSSnRV-w,729
-utill/cmd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-utill/cmd/_bq.py,sha256=MQGLIv_WBUBl2tf18bfYrAszx0Koa5kdTW1c8A5HDDg,520
-utill/cmd/_conf.py,sha256=hNdkApzRhPloRSe4RyxbWuLoyeqkK7Yx9g44kcvKOEM,1800
-utill/cmd/_enc.py,sha256=DBy3Iwa5DTtww7lgHPRLEilrYPrWDG1vRv5PO-YzNO8,997
-utill/cmd/_main.py,sha256=UJ_XTIGDO9XPIypgHhS81SJQ_8qy8JOyw98Or0Nb2x8,273
-utill/cmd/_pg.py,sha256=RVxEiSifyIwMDYDM69vt6WSLdVDr1cMzY6r4T2PzNRA,492
-utill/cmd/utill.py,sha256=TlHfiwOUcK1m58PrRCjX9sARiPYZUsoTk-KOTCOz1vM,3558
-utill/templates/mb.json,sha256=BPnVhMG2FgcxnThYp04Vn5zSQI0G-yQv99qTPNvmSok,44
-utill/templates/pg.json,sha256=49c8AoGznP-omKGEgWlIWFpj7qIjeOC5Nf5k0DxlbHE,256
-rdxz2_utill-0.0.3.dist-info/METADATA,sha256=WA0g56M8_ps8WcPoISrxCE-pG8PLURq3hPez2Pmc9qs,4489
-rdxz2_utill-0.0.3.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
-rdxz2_utill-0.0.3.dist-info/entry_points.txt,sha256=9n5NWz5Wi9jDvYhB_81_4icgT5xABZ-QivHD8ibcafg,47
-rdxz2_utill-0.0.3.dist-info/top_level.txt,sha256=tuAYZoCsr02JYbpZj7I6fl1IIo53v3GG0uoj-_fINVk,6
-rdxz2_utill-0.0.3.dist-info/RECORD,,