backup-docker-to-local 1.1.0__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/METADATA +2 -2
- {backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/RECORD +10 -10
- baudolo/backup/app.py +44 -26
- baudolo/backup/cli.py +8 -3
- baudolo/backup/db.py +90 -28
- baudolo/seed/__main__.py +85 -46
- {backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/WHEEL +0 -0
- {backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/entry_points.txt +0 -0
- {backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/licenses/LICENSE +0 -0
- {backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/top_level.txt +0 -0

{backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: backup-docker-to-local
-Version: 1.1.0
+Version: 1.2.0
 Summary: Backup Docker volumes to local with rsync and optional DB dumps.
 Author: Kevin Veen-Birkenbach
 License: AGPL-3.0-or-later

@@ -147,7 +147,7 @@ baudolo \
 | Flag | Description |
 | --------------- | ------------------------------------------- |
 | `--everything` | Always stop containers and re-run rsync |
-| `--dump-only
+| `--dump-only-sql`| Skip file backups only for DB volumes when dumps succeed; non-DB volumes are still backed up; fallback to files if no dump. |
 | `--shutdown` | Do not restart containers after backup |
 | `--backups-dir` | Backup root directory (default: `/Backups`) |
 | `--repo-name` | Backup namespace under machine hash |
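
The new `--dump-only-sql` row in the flag table condenses a three-way rule. A minimal sketch of that rule as described here (illustrative only; the shipped logic lives in `baudolo/backup/app.py`, shown later in this diff, and the function name below is hypothetical):

```python
def should_skip_file_backup(is_db_volume: bool, dump_succeeded: bool, dump_only_sql: bool) -> bool:
    """Return True when the file backup for a volume can be skipped."""
    # Only DB volumes whose dump succeeded are skipped; non-DB volumes and
    # DB volumes without a dump still fall back to the rsync file backup.
    return dump_only_sql and is_db_volume and dump_succeeded
```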

{backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/RECORD
CHANGED

@@ -1,11 +1,11 @@
-backup_docker_to_local-1.
+backup_docker_to_local-1.2.0.dist-info/licenses/LICENSE,sha256=hIahDEOTzuHCU5J2nd07LWwkLW7Hko4UFO__ffsvB-8,34523
 baudolo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 baudolo/backup/__init__.py,sha256=5BfF8JBXB2j6sAptcmswtbjlajNWxOho6_CjwIamO7k,30
 baudolo/backup/__main__.py,sha256=118gZ0wij9_PAtR-jlG7LizrhrxwhHlAcAPW1eFeJtU,140
-baudolo/backup/app.py,sha256=
-baudolo/backup/cli.py,sha256=
+baudolo/backup/app.py,sha256=dCKSbBGWi-Yw8JnrUTBUQLUyLHXxQd1Ebp_eqCD3LWA,6550
+baudolo/backup/cli.py,sha256=KEHgeohlyBxkACQ6yxhRqh9nTbE2B74Q5U50JBJf7SU,2477
 baudolo/backup/compose.py,sha256=dJiZnHlBZdeXTVqLR5zIbAtwR97YWxbULVn6VL16Hqg,1136
-baudolo/backup/db.py,sha256=
+baudolo/backup/db.py,sha256=A6PRcTAL5ryivpAs6Lc9udmSMIOKGpCBdDq5tI_s8mo,4486
 baudolo/backup/docker.py,sha256=6Sj9fpf1bm-CoqoeerQaq059VyDnF1Pj2ketzIt1Nkk,1364
 baudolo/backup/shell.py,sha256=guMHWcRb0Qlrz79gqFqmJLXVQK8cJEvNkhkMe5Tpgwc,738
 baudolo/backup/volume.py,sha256=DWBp_dZUo-a0MfnbwEd8jRAAHvSyN6vGdRSye0iQJbE,1485

@@ -17,9 +17,9 @@ baudolo/restore/run.py,sha256=rai5F27D6F8RRnFMyjcEGiHBAlVXtMOJoazs6zkLSC0,2302
 baudolo/restore/db/__init__.py,sha256=C4K_YAB0F9p8QhZRZIRyV2naMb54Qf_1O2cDxwwVQAI,59
 baudolo/restore/db/mariadb.py,sha256=_JEQFuF4GhTl8lKmgk2gsHYcXHcpCKrf32tigMSeTKI,2782
 baudolo/restore/db/postgres.py,sha256=_NIlcxdpcQIUZ8SNBWfWIaH1oGIkmZy9emFMgjGznhk,1446
-baudolo/seed/__main__.py,sha256=
-backup_docker_to_local-1.
-backup_docker_to_local-1.
-backup_docker_to_local-1.
-backup_docker_to_local-1.
-backup_docker_to_local-1.
+baudolo/seed/__main__.py,sha256=bw4IaWVhhqYF5toOmtziHRDWbYgTvqHqqRV4KnHKyf4,3088
+backup_docker_to_local-1.2.0.dist-info/METADATA,sha256=Mg-_GQiTyZG1VdS_PiwAFjAKC8V0lrHml_gC60dcN7I,7200
+backup_docker_to_local-1.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+backup_docker_to_local-1.2.0.dist-info/entry_points.txt,sha256=92f5jPSjW__9-u5gzwmWkdiHKt4ysEFCVmwMtorQCv4,147
+backup_docker_to_local-1.2.0.dist-info/top_level.txt,sha256=y_5iNpF7EdLzqWWXIDfcTJpqijyy2hvrYgNiJXrN4r4,8
+backup_docker_to_local-1.2.0.dist-info/RECORD,,

baudolo/backup/app.py
CHANGED

@@ -72,28 +72,27 @@ def requires_stop(containers: list[str], images_no_stop_required: list[str]) ->
             return True
     return False

-
 def backup_mariadb_or_postgres(
     *,
     container: str,
     volume_dir: str,
     databases_df: "pandas.DataFrame",
     database_containers: list[str],
-) -> bool:
+) -> tuple[bool, bool]:
     """
-    Returns
+    Returns (is_db_container, dumped_any)
     """
     for img in ["mariadb", "postgres"]:
         if has_image(container, img):
-            backup_database(
+            dumped = backup_database(
                 container=container,
                 volume_dir=volume_dir,
                 db_type=img,
                 databases_df=databases_df,
                 database_containers=database_containers,
             )
-            return True
-    return False
+            return True, dumped
+    return False, False


 def _backup_dumps_for_volume(

@@ -102,21 +101,26 @@ def _backup_dumps_for_volume(
     vol_dir: str,
     databases_df: "pandas.DataFrame",
     database_containers: list[str],
-) -> bool:
+) -> tuple[bool, bool]:
     """
-
-    Returns True if at least one dump was produced.
+    Returns (found_db_container, dumped_any)
     """
+    found_db = False
     dumped_any = False
+
     for c in containers:
-
+        is_db, dumped = backup_mariadb_or_postgres(
             container=c,
             volume_dir=vol_dir,
             databases_df=databases_df,
             database_containers=database_containers,
-            )
+        )
+        if is_db:
+            found_db = True
+        if dumped:
             dumped_any = True
-
+
+    return found_db, dumped_any


 def main() -> int:

@@ -128,7 +132,12 @@ def main() -> int:
     versions_dir = os.path.join(args.backups_dir, machine_id, args.repo_name)
     version_dir = create_version_directory(versions_dir, backup_time)

-
+    # IMPORTANT:
+    # - keep_default_na=False prevents empty fields from turning into NaN
+    # - dtype=str keeps all columns stable for comparisons/validation
+    databases_df = pandas.read_csv(
+        args.databases_csv, sep=";", keep_default_na=False, dtype=str
+    )

     print("💾 Start volume backups...", flush=True)

@@ -136,27 +145,36 @@
         print(f"Start backup routine for volume: {volume_name}", flush=True)
         containers = containers_using_volume(volume_name)

+        # EARLY SKIP: if all linked containers are ignored, do not create any dirs
+        if volume_is_fully_ignored(containers, args.images_no_backup_required):
+            print(
+                f"Skipping volume '{volume_name}' entirely (all linked containers are ignored).",
+                flush=True,
+            )
+            continue
+
         vol_dir = create_volume_directory(version_dir, volume_name)

-
-        _backup_dumps_for_volume(
+        found_db, dumped_any = _backup_dumps_for_volume(
            containers=containers,
            vol_dir=vol_dir,
            databases_df=databases_df,
            database_containers=args.database_containers,
         )

-        # dump-only:
-        if args.
-
-
-
-
-
-
-
-
+        # dump-only-sql logic:
+        if args.dump_only_sql:
+            if found_db:
+                if not dumped_any:
+                    print(
+                        f"WARNING: dump-only-sql requested but no DB dump was produced for DB volume '{volume_name}'. Falling back to file backup.",
+                        flush=True,
+                    )
+                    # fall through to file backup below
+                else:
+                    # DB volume successfully dumped -> skip file backup
+                    continue
+            # Non-DB volume -> always do file backup (fall through)

         if args.everything:
             # "everything": always do pre-rsync, then stop + rsync again
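
The `# IMPORTANT` comment above is the reason `pandas.read_csv` is now called with `keep_default_na=False` and `dtype=str`. A small standalone demonstration of the pandas behaviour being avoided (illustrative, not part of the package):

```python
import io
import pandas as pd

csv_text = "instance;database;username;password\nweb;;admin;\n"

# Default parsing turns empty fields into NaN floats, which later stringify to "nan".
default_df = pd.read_csv(io.StringIO(csv_text), sep=";")
print(default_df["database"].iloc[0])        # nan (float)

# keep_default_na=False plus dtype=str keeps empty fields as "" strings,
# so validation code can reject them explicitly instead of producing nan.backup.sql.
strict_df = pd.read_csv(io.StringIO(csv_text), sep=";", keep_default_na=False, dtype=str)
print(repr(strict_df["database"].iloc[0]))   # ''
```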

baudolo/backup/cli.py
CHANGED

@@ -68,10 +68,15 @@ def parse_args() -> argparse.Namespace:
         action="store_true",
         help="Do not restart containers after backup",
     )
+
     p.add_argument(
-        "--dump-only",
+        "--dump-only-sql",
         action="store_true",
-        help=
+        help=(
+            "Create database dumps only for DB volumes. "
+            "File backups are skipped for DB volumes if a dump succeeds, "
+            "but non-DB volumes are still backed up. "
+            "If a DB dump cannot be produced, baudolo falls back to a file backup."
+        ),
     )
-
     return p.parse_args()
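
One consequence of the flag rename worth noting: argparse derives the attribute name by replacing dashes with underscores, which is why the app code reads `args.dump_only_sql`. A quick check (illustrative):

```python
import argparse

p = argparse.ArgumentParser()
p.add_argument("--dump-only-sql", action="store_true")

# argparse maps "--dump-only-sql" to the attribute "dump_only_sql".
args = p.parse_args(["--dump-only-sql"])
print(args.dump_only_sql)  # True
```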

baudolo/backup/db.py
CHANGED

@@ -3,9 +3,10 @@ from __future__ import annotations
 import os
 import pathlib
 import re
+import logging
+from typing import Optional

 import pandas
-import logging

 from .shell import BackupException, execute_shell_command

@@ -13,19 +14,53 @@ log = logging.getLogger(__name__)


 def get_instance(container: str, database_containers: list[str]) -> str:
+    """
+    Derive a stable instance name from the container name.
+    """
     if container in database_containers:
         return container
     return re.split(r"(_|-)(database|db|postgres)", container)[0]


-def
-
-
+def _validate_database_value(value: Optional[str], *, instance: str) -> str:
+    """
+    Enforce explicit database semantics:
+
+    - "*" => dump ALL databases (cluster dump for Postgres)
+    - "<name>" => dump exactly this database
+    - "" => invalid configuration (would previously result in NaN / nan.backup.sql)
+    """
+    v = (value or "").strip()
+    if v == "":
+        raise ValueError(
+            f"Invalid databases.csv entry for instance '{instance}': "
+            "column 'database' must be '*' or a concrete database name (not empty)."
+        )
+    return v
+
+
+def _atomic_write_cmd(cmd: str, out_file: str) -> None:
+    """
+    Write dump output atomically:
+    - write to <file>.tmp
+    - rename to <file> only on success
+
+    This prevents empty or partial dump files from being treated as valid backups.
+    """
+    tmp = f"{out_file}.tmp"
+    execute_shell_command(f"{cmd} > {tmp}")
+    execute_shell_command(f"mv {tmp} {out_file}")
+
+
+def fallback_pg_dumpall(container: str, username: str, password: str, out_file: str) -> None:
+    """
+    Perform a full Postgres cluster dump using pg_dumpall.
+    """
     cmd = (
         f"PGPASSWORD={password} docker exec -i {container} "
-        f"pg_dumpall -U {username} -h localhost
+        f"pg_dumpall -U {username} -h localhost"
     )
-
+    _atomic_write_cmd(cmd, out_file)


 def backup_database(

@@ -35,48 +70,75 @@ def backup_database(
     db_type: str,
     databases_df: "pandas.DataFrame",
     database_containers: list[str],
-) ->
+) -> bool:
+    """
+    Backup databases for a given DB container.
+
+    Returns True if at least one dump was produced.
+    """
     instance_name = get_instance(container, database_containers)
-
+
+    entries = databases_df[databases_df["instance"] == instance_name]
     if entries.empty:
-        log.
-        return
+        log.debug("No database entries for instance '%s'", instance_name)
+        return False

     out_dir = os.path.join(volume_dir, "sql")
     pathlib.Path(out_dir).mkdir(parents=True, exist_ok=True)

-
-        db_name = row["database"]
-        user = row["username"]
-        password = row["password"]
+    produced = False

+    for row in entries.itertuples(index=False):
+        raw_db = getattr(row, "database", "")
+        user = (getattr(row, "username", "") or "").strip()
+        password = (getattr(row, "password", "") or "").strip()
+
+        db_value = _validate_database_value(raw_db, instance=instance_name)
+
+        # Explicit: dump ALL databases
+        if db_value == "*":
+            if db_type != "postgres":
+                raise ValueError(
+                    f"databases.csv entry for instance '{instance_name}': "
+                    "'*' is currently only supported for Postgres."
+                )
+
+            cluster_file = os.path.join(
+                out_dir, f"{instance_name}.cluster.backup.sql"
+            )
+            fallback_pg_dumpall(container, user, password, cluster_file)
+            produced = True
+            continue
+
+        # Concrete database dump
+        db_name = db_value
         dump_file = os.path.join(out_dir, f"{db_name}.backup.sql")

         if db_type == "mariadb":
             cmd = (
                 f"docker exec {container} /usr/bin/mariadb-dump "
-                f"-u {user} -p{password} {db_name}
+                f"-u {user} -p{password} {db_name}"
             )
-
+            _atomic_write_cmd(cmd, dump_file)
+            produced = True
             continue

         if db_type == "postgres":
-            cluster_file = os.path.join(out_dir, f"{instance_name}.cluster.backup.sql")
-            if not db_name:
-                fallback_pg_dumpall(container, user, password, cluster_file)
-                return
-
             try:
                 cmd = (
                     f"PGPASSWORD={password} docker exec -i {container} "
-                    f"pg_dump -U {user} -d {db_name} -h localhost
+                    f"pg_dump -U {user} -d {db_name} -h localhost"
                 )
-
+                _atomic_write_cmd(cmd, dump_file)
+                produced = True
             except BackupException as e:
-
-
-                f"
-
+                # Explicit DB dump failed -> hard error
+                raise BackupException(
+                    f"Postgres dump failed for instance '{instance_name}', "
+                    f"database '{db_name}'. This database was explicitly configured "
+                    "and therefore must succeed.\n"
+                    f"{e}"
                 )
-            fallback_pg_dumpall(container, user, password, cluster_file)
             continue
+
+    return produced
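
`_atomic_write_cmd` above is the classic write-to-temp-then-rename pattern. A self-contained sketch of the same idea using only the standard library (assumed names, shown for illustration; the package itself routes everything through `execute_shell_command`):

```python
import os
import subprocess

def dump_atomically(cmd: str, out_file: str) -> None:
    """Run a shell command and publish its output only if the command succeeds."""
    tmp = f"{out_file}.tmp"
    try:
        with open(tmp, "wb") as fh:
            # check=True raises CalledProcessError on a non-zero exit status.
            subprocess.run(cmd, shell=True, stdout=fh, check=True)
    except Exception:
        # Never leave a partial file behind that could shadow a good backup.
        if os.path.exists(tmp):
            os.remove(tmp)
        raise
    os.replace(tmp, out_file)  # atomic rename within the same filesystem
```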

baudolo/seed/__main__.py
CHANGED

@@ -1,67 +1,106 @@
-
+#!/usr/bin/env python3
+from __future__ import annotations
+
 import argparse
 import os
+import re
+import sys
+import pandas as pd
+from typing import Optional


-
-
-
-
-
-
-
-
-
-    # Check if the entry exists and remove it
-    mask = (
-        (df["instance"] == instance)
-        & (
-            (df["database"] == database)
-            | (((df["database"].isna()) | (df["database"] == "")) & (database == ""))
+DB_NAME_RE = re.compile(r"^[a-zA-Z0-9_][a-zA-Z0-9_-]*$")
+
+def _validate_database_value(value: Optional[str], *, instance: str) -> str:
+    v = (value or "").strip()
+    if v == "":
+        raise ValueError(
+            f"Invalid databases.csv entry for instance '{instance}': "
+            "column 'database' must be '*' or a concrete database name (not empty)."
         )
-
-
+    if v == "*":
+        return "*"
+    if v.lower() == "nan":
+        raise ValueError(
+            f"Invalid databases.csv entry for instance '{instance}': database must not be 'nan'."
+        )
+    if not DB_NAME_RE.match(v):
+        raise ValueError(
+            f"Invalid databases.csv entry for instance '{instance}': "
+            f"invalid database name '{v}'. Allowed: letters, numbers, '_' and '-'."
+        )
+    return v
+
+def check_and_add_entry(
+    file_path: str,
+    instance: str,
+    database: Optional[str],
+    username: str,
+    password: str,
+) -> None:
+    """
+    Add or update an entry in databases.csv.
+
+    The function enforces strict validation:
+    - database MUST be set
+    - database MUST be '*' or a valid database name
+    """
+    database = _validate_database_value(database, instance=instance)

-    if
-
-
+    if os.path.exists(file_path):
+        df = pd.read_csv(
+            file_path,
+            sep=";",
+            dtype=str,
+            keep_default_na=False,
+        )
     else:
-
+        df = pd.DataFrame(
+            columns=["instance", "database", "username", "password"]
+        )

-
-    new_entry = pd.DataFrame(
-        [
-            {
-                "instance": instance,
-                "database": database,
-                "username": username,
-                "password": password,
-            }
-        ]
-    )
+    mask = (df["instance"] == instance) & (df["database"] == database)

-
-
+    if mask.any():
+        print("Updating existing entry.")
+        df.loc[mask, ["username", "password"]] = [username, password]
+    else:
+        print("Adding new entry.")
+        new_entry = pd.DataFrame(
+            [[instance, database, username, password]],
+            columns=["instance", "database", "username", "password"],
+        )
+        df = pd.concat([df, new_entry], ignore_index=True)

-    # Save the updated CSV file
     df.to_csv(file_path, sep=";", index=False)


-def main():
+def main() -> None:
     parser = argparse.ArgumentParser(
-        description="
+        description="Seed or update databases.csv for backup configuration."
+    )
+    parser.add_argument("file", help="Path to databases.csv")
+    parser.add_argument("instance", help="Instance name (e.g. bigbluebutton)")
+    parser.add_argument(
+        "database",
+        help="Database name or '*' to dump all databases",
     )
-    parser.add_argument("
-    parser.add_argument("
-    parser.add_argument("database", help="Database name")
-    parser.add_argument("username", help="Username")
-    parser.add_argument("password", nargs="?", default="", help="Password (optional)")
+    parser.add_argument("username", help="Database username")
+    parser.add_argument("password", help="Database password")

     args = parser.parse_args()

-
-
-
+    try:
+        check_and_add_entry(
+            file_path=args.file,
+            instance=args.instance,
+            database=args.database,
+            username=args.username,
+            password=args.password,
+        )
+    except Exception as exc:
+        print(f"ERROR: {exc}", file=sys.stderr)
+        sys.exit(1)


 if __name__ == "__main__":
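
For context, a hypothetical programmatic use of the seeding helper above (the keyword signature comes from the diff; importing from `baudolo.seed.__main__` and the credential values are assumptions for illustration):

```python
from baudolo.seed.__main__ import check_and_add_entry

check_and_add_entry(
    file_path="databases.csv",   # CSV is read and written with sep=";"
    instance="bigbluebutton",    # example instance name taken from the CLI help text
    database="*",                # '*' => dump all databases (Postgres cluster dump)
    username="postgres",         # illustrative credentials, not from the package
    password="secret",
)
# Re-running with the same instance/database pair updates username/password
# in place rather than appending a duplicate row.
```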

{backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/WHEEL
RENAMED
File without changes

{backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/entry_points.txt
RENAMED
File without changes

{backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/licenses/LICENSE
RENAMED
File without changes

{backup_docker_to_local-1.1.0.dist-info → backup_docker_to_local-1.2.0.dist-info}/top_level.txt
RENAMED
File without changes