rdxz2-utill 0.0.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registry.
- {rdxz2_utill-0.0.3.dist-info → rdxz2_utill-0.1.5.dist-info}/METADATA +16 -15
- rdxz2_utill-0.1.5.dist-info/RECORD +38 -0
- {rdxz2_utill-0.0.3.dist-info → rdxz2_utill-0.1.5.dist-info}/WHEEL +1 -1
- utill/cmd/_bq.py +16 -3
- utill/cmd/_conf.py +21 -16
- utill/cmd/_enc.py +8 -4
- utill/cmd/_mb.py +141 -0
- utill/cmd/_pg.py +4 -2
- utill/cmd/utill.py +203 -61
- utill/my_bq.py +661 -293
- utill/my_cli.py +48 -0
- utill/my_compare.py +34 -0
- utill/my_const.py +9 -9
- utill/my_csv.py +41 -20
- utill/my_datetime.py +25 -12
- utill/my_encryption.py +31 -13
- utill/my_env.py +25 -14
- utill/my_file.py +16 -14
- utill/my_gcs.py +93 -105
- utill/my_gdrive.py +196 -0
- utill/my_input.py +8 -4
- utill/my_json.py +6 -6
- utill/my_mb.py +357 -337
- utill/my_pg.py +96 -61
- utill/my_queue.py +96 -7
- utill/my_string.py +23 -5
- utill/my_style.py +18 -16
- utill/my_tunnel.py +30 -9
- utill/my_xlsx.py +12 -9
- utill/templates/mb.json +2 -1
- utill/templates/pg.json +2 -1
- rdxz2_utill-0.0.3.dist-info/RECORD +0 -34
- {rdxz2_utill-0.0.3.dist-info → rdxz2_utill-0.1.5.dist-info}/entry_points.txt +0 -0
- {rdxz2_utill-0.0.3.dist-info → rdxz2_utill-0.1.5.dist-info}/licenses/LICENSE +0 -0
- {rdxz2_utill-0.0.3.dist-info → rdxz2_utill-0.1.5.dist-info}/top_level.txt +0 -0
{rdxz2_utill-0.0.3.dist-info → rdxz2_utill-0.1.5.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: rdxz2-utill
-Version: 0.0.3
+Version: 0.1.5
 Summary: Your daily Python utility
 Author-email: Richard Dharmawan <richard.dharmawan@gmail.com>
 License: MIT License
@@ -25,29 +25,30 @@ License: MIT License
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.
+Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.
+Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: click
-Requires-Dist: duckdb
-Requires-Dist: humanize
-Requires-Dist: loguru
-Requires-Dist: paramiko
-Requires-Dist: pydantic-settings
-Requires-Dist: pydantic
-Requires-Dist: requests
+Requires-Dist: click
+Requires-Dist: duckdb
+Requires-Dist: humanize
+Requires-Dist: loguru
+Requires-Dist: paramiko
+Requires-Dist: pydantic-settings
+Requires-Dist: pydantic
+Requires-Dist: requests
 Requires-Dist: sshtunnel==0.4.0
 Provides-Extra: google-cloud
-Requires-Dist: google-
-Requires-Dist: google-cloud-
+Requires-Dist: google-api-python-client; extra == "google-cloud"
+Requires-Dist: google-cloud-bigquery; extra == "google-cloud"
+Requires-Dist: google-cloud-storage; extra == "google-cloud"
 Provides-Extra: postgresql
-Requires-Dist: psycopg
+Requires-Dist: psycopg; extra == "postgresql"
 Provides-Extra: pdf
-Requires-Dist: PyPDF2
+Requires-Dist: PyPDF2; extra == "pdf"
 Dynamic: license-file
 
 # Using this library
rdxz2_utill-0.1.5.dist-info/RECORD
ADDED

@@ -0,0 +1,38 @@
+rdxz2_utill-0.1.5.dist-info/licenses/LICENSE,sha256=PF9CUvzP8XFYopEAzrMzSCovF7RdBdscPqJCDC6KjPc,1073
+utill/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+utill/my_bq.py,sha256=os2cxf3D_2O7A6FqvQZ8NJS5UCrfuti6AvVRIDKgNss,27222
+utill/my_cli.py,sha256=YCp6Lgis8OSgGFFIc5HEocHeogFPtYFk746VL7sEb9Y,1467
+utill/my_compare.py,sha256=232I5cJFBYDkd-whcl371YkYbQfJTscBhlZo5LFknvY,885
+utill/my_const.py,sha256=-nE_HNYy-oQeLJU02VX0aB7_CIg_UaWPpR_caZq-MBA,290
+utill/my_csv.py,sha256=Y0esABZTDKJdRLzkQdbqP5TjD_pGU0ntpofbxhCnv7A,3009
+utill/my_datetime.py,sha256=fBhorTEaRVXzjHDLbu5Is6ZbB-KMbTJNjhxgkBM_UQE,2244
+utill/my_dict.py,sha256=jPaPfdn4WYpm0uIBPiYFinpHhx1jXpFVDJ9npmvxGZQ,391
+utill/my_encryption.py,sha256=N8ltNH05gwYawaFfh77SzpsMKkRHdUcSkA7X-1l6VKs,2200
+utill/my_env.py,sha256=3tHzi-R5QFGI50bJ6J706IZ33A01joD4StQB4ZJEwEs,2227
+utill/my_file.py,sha256=zhyxpmIC3zmdtlvlcDR53PMjov68ZSSscOhQ2dOdbfM,1891
+utill/my_gcs.py,sha256=f6aAmtWJZy2ucxZloWCJZEv_TqTszGhseATNULzE7wc,3456
+utill/my_gdrive.py,sha256=l4eoBSElTY9lN-1orwFJw09n7-Xr1BVYCsPR8wTsoFw,6387
+utill/my_input.py,sha256=hOpoFmDi2_AyTv6LWv7pOaMWMXrDjkRUCs8pLtzoWCY,364
+utill/my_json.py,sha256=6FYldd4sNpnUbEHdCbUZ4sU1JDiSSnxGIOYDsa0cBnc,1472
+utill/my_mb.py,sha256=DKHgjXIAF39fbZ7Rv6qZQw3aRQkb2mcYEkHepWQRJhQ,14594
+utill/my_pg.py,sha256=rYOtqoXwrx-6HemFsT2B0pB0tybxmJ811pUuKGypicI,7178
+utill/my_queue.py,sha256=3Ny5Lsir_ggFWJKQlcxuvDa5589Q-7BiR2QmVbQbMcM,4742
+utill/my_string.py,sha256=upy5wdW71fzZqkhGDQRGTbc-XFCotW2tawriTCMexBg,965
+utill/my_style.py,sha256=gY6ZPB0PoXVBEMKeBlhCJYYVgNEQiB-zz-FcPO6s23M,906
+utill/my_tunnel.py,sha256=bt1MiZ7EpHYDTaD20fL3-jP-_nHHslFEPziLPX4kfdE,1455
+utill/my_xlsx.py,sha256=2pYwQvK9Ppy_Bj3H66HmLS5_4YxZUWBCcSJOLNIzrVg,726
+utill/cmd/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+utill/cmd/_bq.py,sha256=JhY3HcUv8ByXXV4l_8HB_uXTSljU3gv8u2kOZ-XjyCE,622
+utill/cmd/_conf.py,sha256=TbRbnyZ-NsPDzojuqeDUhKlyZwYq_FCLyVLg53idLNk,1957
+utill/cmd/_enc.py,sha256=aCD_8wMTPvMbjqByg0Yiac2AABrLr2Gz2KRRHXi6-ZM,1029
+utill/cmd/_main.py,sha256=UJ_XTIGDO9XPIypgHhS81SJQ_8qy8JOyw98Or0Nb2x8,273
+utill/cmd/_mb.py,sha256=a4TBZFhR_kEoYRBlqoTEB3bi2geHy7Fa-_oc75icLJU,5275
+utill/cmd/_pg.py,sha256=Vn9BY0aiuLmk-HbBjBHEghG6KzR7NATQgvaDGDE2wgo,498
+utill/cmd/utill.py,sha256=HsCfPtWSOn8dOfS31JQi4Lq83CmSd5yQmIkva3bhvWc,5344
+utill/templates/mb.json,sha256=M46ZHSaSh4rbD_KGUViGr2B2ZV8_PC-O5Evqi35JK5g,59
+utill/templates/pg.json,sha256=LkJt0VV3zcyt7Tpn6gulsoVQgUc-9uImXOStvzu8cdU,271
+rdxz2_utill-0.1.5.dist-info/METADATA,sha256=_DbYYbls8tJUt3_d9jaTvIwkGsfSWaGJ3wCIrhY02X8,4466
+rdxz2_utill-0.1.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+rdxz2_utill-0.1.5.dist-info/entry_points.txt,sha256=9n5NWz5Wi9jDvYhB_81_4icgT5xABZ-QivHD8ibcafg,47
+rdxz2_utill-0.1.5.dist-info/top_level.txt,sha256=tuAYZoCsr02JYbpZj7I6fl1IIo53v3GG0uoj-_fINVk,6
+rdxz2_utill-0.1.5.dist-info/RECORD,,
utill/cmd/_bq.py
CHANGED

@@ -1,12 +1,25 @@
-def _upload_csv(
+def _upload_csv(
+    src_filename: str,
+    dst_table_fqn: str,
+    columns: list[dict[str, str]],
+    partition_col: str = None,
+    cluster_cols: list[str] = None,
+    project: str = None,
+):
     from ..my_bq import BQ
 
     bq = BQ(project)
-    bq.upload_csv(
+    bq.upload_csv(
+        src_filename,
+        dst_table_fqn,
+        [{"name": col, "data_type": dtype} for col, dtype in columns],
+        partition_col,
+        cluster_cols,
+    )
 
 
 def _download_table(src_table_fqn: str, dst_filename: str, project: str):
     from ..my_bq import BQ
 
     bq = BQ(project)
-    bq.download_csv(f
+    bq.download_csv(f"SELECT * FROM {src_table_fqn}", dst_filename)
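The rewritten call site implies that `columns` arrives as an iterable of `(name, data_type)` pairs, which the comprehension converts into the schema dicts passed to `BQ.upload_csv`; note that the `list[dict[str, str]]` annotation does not match that unpacking. A minimal sketch of the mapping, with hypothetical sample values:

```python
# Hypothetical input: (column name, BigQuery data type) pairs, e.g. as
# collected from repeated CLI options.
columns = [("id", "INT64"), ("name", "STRING"), ("created_at", "TIMESTAMP")]

# The same comprehension as in _upload_csv above.
schema = [{"name": col, "data_type": dtype} for col, dtype in columns]
print(schema)
# [{'name': 'id', 'data_type': 'INT64'},
#  {'name': 'name', 'data_type': 'STRING'},
#  {'name': 'created_at', 'data_type': 'TIMESTAMP'}]
```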
utill/cmd/_conf.py
CHANGED

@@ -1,20 +1,23 @@
 def _init(mode: str):
     from loguru import logger
 
-    from ..my_env import envs
+    from ..my_env import envs
+    from ..my_env import init_mb_file
+    from ..my_env import init_pg_file
 
     match mode:
-        case
-            setattr(envs,
-            setattr(envs,
+        case "google-cloud":
+            setattr(envs, "GCP_PROJECT_ID", input("GCP_PROJECT_ID: "))
+            setattr(envs, "GCP_REGION", input("GCP_REGION: "))
+            setattr(envs, "GCS_BUCKET", input("GCS_BUCKET: "))
             envs.write()
-            logger.info(
-        case
+            logger.info("Google cloud configuration initialized")
+        case "postgresql":
             init_pg_file()
-        case
+        case "metabase":
             init_mb_file()
         case _:
-            logger.warning(f
+            logger.warning(f"Mode '{mode}' not recognized")
 
 
 def _list(module: str = None):
@@ -23,32 +26,34 @@ def _list(module: str = None):
 
     from loguru import logger
 
-    from ..my_env import
+    from ..my_env import MB_FILENAME
+    from ..my_env import PG_FILENAME
+    from ..my_env import envs
     from ..my_string import mask
 
     match module:
-        case
+        case "postgresql":
             if not os.path.exists(PG_FILENAME):
-                logger.error(
+                logger.error("PostgreSQL configuraiton not exists")
                 return
 
-            config: dict = json.loads(open(PG_FILENAME,
+            config: dict = json.loads(open(PG_FILENAME, "r").read())
            for k, v in config.items():
                print(k)
                for k2, v2 in v.items():
                    print(f'\t{k2} = {mask(str(v2)) if k2 in ("password", ) else v2}')
 
-        case
+        case "metabase":
             if not os.path.exists(MB_FILENAME):
-                logger.error(
+                logger.error("Metabase configuration not exists")
                 return
 
-            config: dict = json.loads(open(MB_FILENAME,
+            config: dict = json.loads(open(MB_FILENAME, "r").read())
             for k, v in config.items():
                 print(f'{k} = {mask(str(v)) if k in ("api_key", ) else v}')
         case _:
             for env in envs.model_fields:
-                print(f
+                print(f"{env} = {getattr(envs, env)}")
 
 
 def _set(vars: list[tuple[str, str]]):
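The `_list` branches print each profile's settings while masking secret fields. A self-contained sketch of that masking loop, using an inline config instead of reading PG_FILENAME from disk, and a hypothetical stand-in for `utill.my_string.mask` (its real implementation is not shown in this diff):

```python
import json

def mask(value: str) -> str:
    # Hypothetical stand-in: keep the first two characters, star the rest.
    return value[:2] + "*" * max(len(value) - 2, 0)

# Mirrors the "postgresql" branch of _list above.
config: dict = json.loads('{"my-db": {"host": "localhost", "password": "s3cret"}}')
for k, v in config.items():
    print(k)
    for k2, v2 in v.items():
        print(f'\t{k2} = {mask(str(v2)) if k2 in ("password", ) else v2}')
# my-db
#         host = localhost
#         password = s3****
```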
utill/cmd/_enc.py
CHANGED

@@ -5,22 +5,26 @@ def _encrypt(src: str, password: str, output: str = None, force: bool = False):
     path_password = Path(password).expanduser()
     if path_password.exists():
         if not path_password.is_file():
-            raise ValueError(f
+            raise ValueError(f"Password path is not a file: {password}")
         else:
-            password = open(path_password.as_posix(),
+            password = open(path_password.as_posix(), "r").read().strip()
 
     path_src = Path(src).expanduser()
     if path_src.exists():
         if path_src.is_dir():
-            raise ValueError(f
+            raise ValueError(f"Source file is a directory: {src}")
 
         # Do encryption
         from ..my_encryption import encrypt_file
+
         if output:
-            encrypt_file(
+            encrypt_file(
+                path_src.as_posix(), password, dst_filename=output, overwrite=force
+            )
         else:
             print(encrypt_file(path_src.as_posix(), password))
     else:
         # Do encryption
         from ..my_encryption import encrypt_string
+
         print(encrypt_string(src, password))
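`_encrypt` treats its `password` argument as either a literal secret or a path to a file holding one, preferring the file when it exists. The same dispatch, distilled into a standalone sketch (the helper name is hypothetical):

```python
from pathlib import Path

def resolve_password(password: str) -> str:
    # If the argument names an existing path, read the secret from that
    # file; otherwise treat the argument itself as the secret.
    path_password = Path(password).expanduser()
    if path_password.exists():
        if not path_password.is_file():
            raise ValueError(f"Password path is not a file: {password}")
        return open(path_password.as_posix(), "r").read().strip()
    return password

print(resolve_password("hunter2"))           # no such path -> literal "hunter2"
# resolve_password("~/.secrets/key")         # would return the file's contents, stripped
```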
utill/cmd/_mb.py
ADDED

@@ -0,0 +1,141 @@
+
+def _jl_grant(emails: list[str], url: str, create_user_if_not_exists: bool = False):
+    from loguru import logger
+
+    from ..my_mb import MB
+
+    mb = MB()
+    all_users_by_email = {
+        user["email"]: user for user in mb.get_all_users(all=True)
+    }
+    all_groups_by_name = {x["name"]: x for x in mb.get_all_groups()}
+
+    # Get information for this object
+    logger.info("Getting Metabase object information")
+    object_type, object_id = mb.get_object_info_from_url(url)
+    collection_id: int | None = None
+    collection_location: str | None = None
+    match (object_type):
+        case "question":
+            question = mb.get_question(object_id)
+            collection_id = int(question["collection"]["id"])
+            collection_location = question["collection"]["location"] + str(
+                question["collection"]["id"]
+            )
+        case "dashboard":
+            dashboard = mb.get_dashboard(object_id)
+            collection_id = int(dashboard["collection"]["id"])
+            collection_location = dashboard["collection"]["location"] + str(
+                dashboard["collection"]["id"]
+            )
+        case "collection":
+            collection = mb.get_collection(object_id)
+            collection_id = object_id
+            collection_location = collection["location"] + str(
+                collection["collection"]["id"]
+            )
+        case _:
+            raise ValueError(
+                f"Unknown object type {object_type} from {url}"
+            )
+    logger.info(
+        f'Object found: type "{object_type}", ID {object_id}, collection ID {collection_id}'
+    )
+
+    # Get group info that this collection should be granted to
+    logger.info(f"Getting group information for the object: {collection_location}")
+    group_name = mb.decode_collection_location_to_group(collection_location)
+    if group_name not in all_groups_by_name:
+        # If group not exists, create it and immediately grant readonly access to the collectiond
+        mb.create_group(group_name)
+        all_groups_by_name = {x["name"]: x for x in mb.get_all_groups()}
+        group_id = int(all_groups_by_name[group_name]["id"])
+        mb.grant_group_to_collection(group_id, collection_id)
+    else:
+        group_id = int(all_groups_by_name[group_name]["id"])
+    logger.info(f"Group found: [{group_id}] {group_name}")
+
+    # Get user informations, create if not exists
+    logger.info(f"Getting information from {len(emails)} users")
+    users = set()
+    created_users = 0
+    not_found_emails = []
+    for email in emails:
+        if email not in all_users_by_email:
+            if create_user_if_not_exists:
+                logger.info(f"Creating user {email}")
+                email_name, email_domain = email.split("@", 1)
+                mb.create_user(
+                    first_name=email_name,
+                    last_name=email_domain,
+                    email=email,
+                    group_ids=[1],  # Add to 'All Users' group
+                )
+                # all_users_by_email = {
+                #     user["email"]: user for user in mb.get_all_users(all=True)
+                # }
+                created_users += 1
+            else:
+                not_found_emails.append(email)
+    if not_found_emails:
+        raise ValueError(f"Users not found: {', '.join(not_found_emails)}")
+
+    # Re-fetch all users if there are new users created
+    if created_users:
+        logger.info("Users created, re-fetching all users")
+        all_users_by_email = {
+            user["email"]: user for user in mb.get_all_users(all=True)
+        }
+
+    # Grant access
+    logger.info(
+        f"Granting access to group [{group_id}] {group_name} for {len(emails)} users"
+    )
+    for email in emails:
+        user = all_users_by_email[email]
+        if (
+            not user["is_active"]
+        ) and create_user_if_not_exists:  # Reactivate user if disabled
+            logger.info(f"Reactivating user {user['id']}")
+            mb.enable_user(user["id"])
+
+        user_id = int(user["id"])
+        user_email = user["email"]
+        if group_id in user["group_ids"]:
+            # Skip if user already in the group because it will cause 500 error on Metabase later (it tries to insert the permissions to its DB and got duplicate key error)
+            logger.info(f"User {user_id} already in group {group_id}, skipping")
+            continue
+        users.add((user_id, user_email))
+    logger.info(
+        f"Users to be granted: {', '.join([f'[{user_id}] {user_email}' for user_id, user_email in users])}"
+    )
+
+    # Assign all user to the group
+    for user_id, user_email in users:
+        logger.info(f"Assigning user {user_id} to group {group_id}")
+        mb.grant_user_to_group(user_id, group_id)
+    logger.info("All users assigned to the group")
+
+
+def _copy_permissions(src_email: str, dst_emails: list[str]):
+    from ..my_mb import MB
+
+    mb = MB()
+    for dst_email in dst_emails:
+        mb.mirror_permission(src_email, dst_email)
+
+
+def _reset_password(emails: list[str]):
+    from ..my_mb import MB
+
+    mb = MB()
+    for email in emails:
+        mb.reset_password(email)
+
+
+def _disable_user(emails: list[str]):
+    from ..my_mb import MB
+
+    mb = MB()
+    for email in emails:
+        mb.disable_user(email)
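The new `_mb.py` helpers are thin wrappers around `utill.my_mb.MB`. A hypothetical invocation, assuming the package is installed, the Metabase configuration has been initialized, and the URL and email values below exist on your instance:

```python
from utill.cmd._mb import _copy_permissions, _disable_user, _jl_grant

# Grant two users access to the group derived from a dashboard's collection,
# creating missing users first (they land in the 'All Users' group).
_jl_grant(
    emails=["alice@example.com", "bob@example.com"],
    url="https://metabase.example.com/dashboard/42-sales",  # hypothetical URL
    create_user_if_not_exists=True,
)

# Mirror one user's permissions onto another, then disable a third.
_copy_permissions("alice@example.com", ["bob@example.com"])
_disable_user(["carol@example.com"])
```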
utill/cmd/_pg.py
CHANGED

@@ -1,7 +1,9 @@
-def _pg_to_pg(
+def _pg_to_pg(
+    src_profile: str, src_table: str, dst_profile: str, dst_table: str, columns: str
+):
     from ..my_pg import PG
 
-    columns =
+    columns = ",".join([f"{x}" for x in columns.split(",")]) if columns != "*" else None
     pg_src = PG(src_profile)
     pg_dst = PG(dst_profile)
 
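The new `columns` handling maps the sentinel `"*"` to `None` (meaning all columns) and otherwise splits and re-joins the comma-separated list. The same expression, exercised standalone (the wrapper name is hypothetical):

```python
def normalize_columns(columns: str):
    # "*" -> None (all columns); otherwise split on commas and re-join.
    return ",".join([f"{x}" for x in columns.split(",")]) if columns != "*" else None

print(normalize_columns("id,name,created_at"))  # -> "id,name,created_at"
print(normalize_columns("*"))                   # -> None
```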