dycw-utilities 0.148.5__py3-none-any.whl → 0.149.1__py3-none-any.whl
This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- {dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/METADATA +1 -1
- {dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/RECORD +7 -7
- utilities/__init__.py +1 -1
- utilities/postgres.py +219 -110
- {dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/WHEEL +0 -0
- {dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/entry_points.txt +0 -0
- {dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/licenses/LICENSE +0 -0
{dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/RECORD CHANGED
@@ -1,4 +1,4 @@
-utilities/__init__.py,sha256=
+utilities/__init__.py,sha256=JP1_vDjx7triKVpffufyRF4tTLDTrHerAtlEopa1IYA,60
 utilities/altair.py,sha256=92E2lCdyHY4Zb-vCw6rEJIsWdKipuu-Tu2ab1ufUfAk,9079
 utilities/asyncio.py,sha256=z0w3fb-U5Ml5YXVaFFPClizXaQmjDO6YgZg-V9QL0VQ,16021
 utilities/atomicwrites.py,sha256=xcOWenTBRS0oat3kg7Sqe51AohNThMQ2ixPL7QCG8hw,5795
@@ -48,7 +48,7 @@ utilities/pickle.py,sha256=MBT2xZCsv0pH868IXLGKnlcqNx2IRVKYNpRcqiQQqxw,653
 utilities/platform.py,sha256=Ue9LSxYvg9yUXGKuz5aZoy_qkUEXde-v6B09exgSctU,2813
 utilities/polars.py,sha256=BgiDryAVOapi41ddfJqN0wYh_sDj8BNEYtPB36LaHdo,71824
 utilities/polars_ols.py,sha256=Uc9V5kvlWZ5cU93lKZ-cfAKdVFFw81tqwLW9PxtUvMs,5618
-utilities/postgres.py,sha256=
+utilities/postgres.py,sha256=rMJ2Bp5IzPz365qKy0r80usa7p_3F0bhNjgFDUHA8CY,11847
 utilities/pottery.py,sha256=w2X80PXWwzdHdqSYJP6ESrPNNDP3xzpyuJn-fp-Vt3M,5969
 utilities/pqdm.py,sha256=BTsYPtbKQWwX-iXF4qCkfPG7DPxIB54J989n83bXrIo,3092
 utilities/psutil.py,sha256=KUlu4lrUw9Zg1V7ZGetpWpGb9DB8l_SSDWGbANFNCPU,2104
@@ -88,8 +88,8 @@ utilities/zoneinfo.py,sha256=oEH-nL3t4h9uawyZqWDtNtDAl6M-CLpLYGI_nI6DulM,1971
 utilities/pytest_plugins/__init__.py,sha256=U4S_2y3zgLZVfMenHRaJFBW8yqh2mUBuI291LGQVOJ8,35
 utilities/pytest_plugins/pytest_randomly.py,sha256=NXzCcGKbpgYouz5yehKb4jmxmi2SexKKpgF4M65bi10,414
 utilities/pytest_plugins/pytest_regressions.py,sha256=Iwhfv_OJH7UCPZCfoh7ugZ2Xjqjil-BBBsOb8sDwiGI,1471
-dycw_utilities-0.
-dycw_utilities-0.
-dycw_utilities-0.
-dycw_utilities-0.
-dycw_utilities-0.
+dycw_utilities-0.149.1.dist-info/METADATA,sha256=4gXdqC0JM7T2UI9VWjQkJxq3-eqcaDG3SAEGsoRO01c,1697
+dycw_utilities-0.149.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+dycw_utilities-0.149.1.dist-info/entry_points.txt,sha256=BOD_SoDxwsfJYOLxhrSXhHP_T7iw-HXI9f2WVkzYxvQ,135
+dycw_utilities-0.149.1.dist-info/licenses/LICENSE,sha256=gppZp16M6nSVpBbUBrNL6JuYfvKwZiKgV7XoKKsHzqo,1066
+dycw_utilities-0.149.1.dist-info/RECORD,,
utilities/__init__.py CHANGED
utilities/postgres.py CHANGED
@@ -12,6 +12,7 @@ from utilities.asyncio import stream_command
 from utilities.iterables import always_iterable
 from utilities.logging import get_logger
 from utilities.os import temp_environ
+from utilities.pathlib import ensure_suffix
 from utilities.sqlalchemy import get_table_name
 from utilities.timer import Timer
 from utilities.types import PathLike
@@ -31,7 +32,6 @@ async def pg_dump(
     path: PathLike,
     /,
     *,
-    docker: str | None = None,
     format_: _PGDumpFormat = "plain",
     jobs: int | None = None,
     schemas: MaybeListStr | None = None,
@@ -40,25 +40,78 @@ async def pg_dump(
     tables_exc: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
     inserts: bool = False,
     on_conflict_do_nothing: bool = False,
-
+    docker: str | None = None,
     dry_run: bool = False,
+    logger: LoggerOrName | None = None,
 ) -> None:
     """Run `pg_dump`."""
-    path =
+    path = _path_pg_dump(path, format_=format_)
     path.parent.mkdir(parents=True, exist_ok=True)
-
-
-
-
-
-
-
-
-
-
+    cmd = _build_pg_dump(
+        url,
+        path,
+        format_=format_,
+        jobs=jobs,
+        schemas=schemas,
+        schemas_exc=schemas_exc,
+        tables=tables,
+        tables_exc=tables_exc,
+        inserts=inserts,
+        on_conflict_do_nothing=on_conflict_do_nothing,
+        docker=docker,
+    )
+    if dry_run:
+        if logger is not None:
+            get_logger(logger=logger).info("Would run %r", str(cmd))
+        return
+    with temp_environ(PGPASSWORD=url.password), Timer() as timer:  # pragma: no cover
+        try:
+            output = await stream_command(cmd)
+        except KeyboardInterrupt:
+            if logger is not None:
+                get_logger(logger=logger).info(
+                    "Cancelled backup to %r after %s", str(path), timer
+                )
+            rmtree(path, ignore_errors=True)
+        else:
+            match output.return_code:
+                case 0:
+                    if logger is not None:
+                        get_logger(logger=logger).info(
+                            "Backup to %r finished after %s", str(path), timer
+                        )
+                case _:
+                    if logger is not None:
+                        get_logger(logger=logger).exception(
+                            "Backup to %r failed after %s\nstderr:\n%s",
+                            str(path),
+                            timer,
+                            output.stderr,
+                        )
+                    rmtree(path, ignore_errors=True)
+
+
+def _build_pg_dump(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    format_: _PGDumpFormat = "plain",
+    jobs: int | None = None,
+    schemas: MaybeListStr | None = None,
+    schemas_exc: MaybeListStr | None = None,
+    tables: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
+    tables_exc: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
+    inserts: bool = False,
+    on_conflict_do_nothing: bool = False,
+    docker: str | None = None,
+) -> str:
+    database, host, port = _extract_url(url)
+    path = _path_pg_dump(path, format_=format_)
+    parts: list[str] = [
         "pg_dump",
         # general options
-        f"--dbname={
+        f"--dbname={database}",
         f"--file={str(path)!r}",
         f"--format={format_}",
         "--verbose",
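With this refactor `pg_dump` only assembles the command string (via the new `_build_pg_dump`) and, when `dry_run=True`, logs it instead of executing anything. A minimal usage sketch, not taken from the package docs: it assumes the `url` argument is a SQLAlchemy `URL` (the module reads `url.database`, `url.host`, `url.port`, `url.username` and `url.password`) and that `logger` accepts a plain logger name; the connection details are illustrative only.

    import asyncio
    import logging

    from sqlalchemy import URL  # assumed to be the URL type the module expects

    from utilities.postgres import pg_dump

    logging.basicConfig(level=logging.INFO)

    # Illustrative connection details only.
    url = URL.create(
        "postgresql",
        username="postgres",
        password="postgres",
        host="localhost",
        port=5432,
        database="mydb",
    )

    # dry_run=True: the pg_dump command is built and logged, never executed.
    asyncio.run(pg_dump(url, "backups/mydb", format_="custom", dry_run=True, logger="backup"))

With `format_="custom"` the target path gains a `.pgdump` suffix through `_path_pg_dump` before the command is built.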
@@ -69,10 +122,10 @@ async def pg_dump(
         "--no-privileges",
         "--if-exists",
         # connection options
-        f"--host={
-        f"--port={
+        f"--host={host}",
+        f"--port={port}",
         "--no-password",
-    ]
+    ]
     if (format_ == "directory") and (jobs is not None):
         parts.append(f"--jobs={jobs}")
     if schemas is not None:
@@ -91,7 +144,61 @@ async def pg_dump(
         parts.append("--on-conflict-do-nothing")
     if url.username is not None:
         parts.append(f"--username={url.username}")
-
+    if docker is not None:
+        parts = _wrap_docker(parts, docker)
+    return " ".join(parts)
+
+
+def _path_pg_dump(path: PathLike, /, *, format_: _PGDumpFormat = "plain") -> Path:
+    match format_:
+        case "plain":
+            suffix = ".sql"
+        case "custom":
+            suffix = ".pgdump"
+        case "directory":
+            suffix = None
+        case "tar":
+            suffix = ".tar"
+        case _ as never:
+            assert_never(never)
+    path = Path(path)
+    if suffix is not None:
+        path = ensure_suffix(path, suffix)
+    return path
+
+
+##
+
+
+async def restore(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    psql: bool = False,
+    database: str | None = None,
+    data_only: bool = False,
+    jobs: int | None = None,
+    schemas: MaybeListStr | None = None,
+    schemas_exc: MaybeListStr | None = None,
+    tables: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
+    docker: str | None = None,
+    dry_run: bool = False,
+    logger: LoggerOrName | None = None,
+) -> None:
+    """Run `pg_restore`/`psql`."""
+    cmd = _build_pg_restore_or_psql(
+        url,
+        path,
+        psql=psql,
+        database=database,
+        data_only=data_only,
+        jobs=jobs,
+        schemas=schemas,
+        schemas_exc=schemas_exc,
+        tables=tables,
+        docker=docker,
+    )
     if dry_run:
         if logger is not None:
             get_logger(logger=logger).info("Would run %r", str(cmd))
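The new `_path_pg_dump` helper pins the output path to the dump format: `.sql` for plain, `.pgdump` for custom, `.tar` for tar, and no suffix for directory dumps. A standalone sketch of that mapping, using `Path.with_suffix` as a stand-in for `ensure_suffix` from `utilities.pathlib` (the two may differ when the input already carries a suffix); the `dump_path` name is illustrative.

    from pathlib import Path

    # Format-to-suffix mapping mirrored from _path_pg_dump.
    _SUFFIXES: dict[str, str | None] = {
        "plain": ".sql",
        "custom": ".pgdump",
        "directory": None,  # directory dumps keep the path as-is
        "tar": ".tar",
    }


    def dump_path(path: str, format_: str = "plain") -> Path:
        """Return the path the pg_dump command would be pointed at."""
        suffix = _SUFFIXES[format_]
        p = Path(path)
        return p if suffix is None else p.with_suffix(suffix)


    assert dump_path("backups/mydb", "custom") == Path("backups/mydb.pgdump")
    assert dump_path("backups/mydb", "directory") == Path("backups/mydb")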
@@ -102,89 +209,75 @@ async def pg_dump(
         except KeyboardInterrupt:
             if logger is not None:
                 get_logger(logger=logger).info(
-                    "Cancelled
+                    "Cancelled restore from %r after %s", str(path), timer
                 )
-            rmtree(path, ignore_errors=True)
         else:
             match output.return_code:
                 case 0:
                     if logger is not None:
                         get_logger(logger=logger).info(
-                            "
+                            "Restore from %r finished after %s", str(path), timer
                         )
                 case _:
                     if logger is not None:
                         get_logger(logger=logger).exception(
-                            "
+                            "Restore from %r failed after %s\nstderr:\n%s",
                             str(path),
                             timer,
                             output.stderr,
                         )
-                    rmtree(path, ignore_errors=True)
-
-
-@dataclass(kw_only=True, slots=True)
-class PGDumpError(Exception):
-    url: URL
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGDumpDatabaseError(PGDumpError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'database'; got {self.url}"
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGDumpHostError(PGDumpError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'host'; got {self.url}"
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGDumpPortError(PGDumpError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'port'; got {self.url}"
 
 
 ##
 
 
-
+def _build_pg_restore_or_psql(
     url: URL,
     path: PathLike,
     /,
     *,
+    psql: bool = False,
     database: str | None = None,
+    data_only: bool = False,
+    jobs: int | None = None,
+    schemas: MaybeListStr | None = None,
+    schemas_exc: MaybeListStr | None = None,
+    tables: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
     docker: str | None = None,
+) -> str:
+    path = Path(path)
+    if (path.suffix == ".sql") or psql:
+        return _build_psql(url, path, database=database, docker=docker)
+    return _build_pg_restore(
+        url,
+        path,
+        database=database,
+        data_only=data_only,
+        jobs=jobs,
+        schemas=schemas,
+        schemas_exc=schemas_exc,
+        tables=tables,
+        docker=docker,
+    )
+
+
+def _build_pg_restore(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    database: str | None = None,
     data_only: bool = False,
     jobs: int | None = None,
     schemas: MaybeListStr | None = None,
     schemas_exc: MaybeListStr | None = None,
     tables: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
-
-
-) -> None:
+    docker: str | None = None,
+) -> str:
     """Run `pg_restore`."""
-
-
-
-        case None, str() as database_use:
-            ...
-        case None, None:
-            raise _PGRestoreDatabaseError(url=url)
-        case _ as never:
-            assert_never(never)
-    if url.host is None:
-        raise _PGRestoreHostError(url=url)
-    if url.port is None:
-        raise _PGRestorePortError(url=url)
-    parts: list[str] = []
-    if docker is not None:
-        parts.extend(["docker", "exec", "-it", docker])
-    parts.extend([
+    url_database, host, port = _extract_url(url)
+    database_use = url_database if database is None else database
+    parts: list[str] = [
         "pg_restore",
         # general options
         f"--dbname={database_use}",
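`_build_pg_restore_or_psql` picks the client from the input: a `.sql` dump (or an explicit `psql=True`) is replayed with `psql`, every other format goes through `pg_restore`. The dispatch rule in isolation, as a small sketch with an illustrative helper name:

    from pathlib import Path


    def choose_client(path: str, *, psql: bool = False) -> str:
        """Mirror the dispatch above: plain-SQL dumps go to psql, binary formats to pg_restore."""
        if Path(path).suffix == ".sql" or psql:
            return "psql"
        return "pg_restore"


    assert choose_client("backups/mydb.sql") == "psql"
    assert choose_client("backups/mydb.pgdump") == "pg_restore"
    assert choose_client("backups/mydb.pgdump", psql=True) == "psql"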
@@ -194,10 +287,10 @@ async def pg_restore(
         "--no-owner",
         "--no-privileges",
         # connection options
-        f"--host={
-        f"--port={
+        f"--host={host}",
+        f"--port={port}",
         "--no-password",
-    ]
+    ]
     if data_only:
         parts.append("--data-only")
     else:
@@ -212,68 +305,80 @@ async def pg_restore(
         parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(tables)])
     if url.username is not None:
         parts.append(f"--username={url.username}")
+    if docker is not None:
+        parts = _wrap_docker(parts, docker)
     parts.append(str(path))
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    return " ".join(parts)
+
+
+def _build_psql(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    database: str | None = None,
+    docker: str | None = None,
+) -> str:
+    """Run `psql`."""
+    url_database, host, port = _extract_url(url)
+    database_use = url_database if database is None else database
+    parts: list[str] = [
+        "psql",
+        # general options
+        f"--dbname={database_use}",
+        f"--file={str(path)!r}",
+        # connection options
+        f"--host={host}",
+        f"--port={port}",
+        "--no-password",
+    ]
+    if url.username is not None:
+        parts.append(f"--username={url.username}")
+    if docker is not None:
+        parts = _wrap_docker(parts, docker)
+    return " ".join(parts)
+
+
+##
+
+
+def _extract_url(url: URL, /) -> tuple[str, str, int]:
+    if url.database is None:
+        raise _ExtractURLDatabaseError(url=url)
+    if url.host is None:
+        raise _ExtractURLHostError(url=url)
+    if url.port is None:
+        raise _ExtractURLPortError(url=url)
+    return url.database, url.host, url.port
 
 
 @dataclass(kw_only=True, slots=True)
-class
+class ExtractURLError(Exception):
     url: URL
 
 
 @dataclass(kw_only=True, slots=True)
-class
+class _ExtractURLDatabaseError(ExtractURLError):
     @override
     def __str__(self) -> str:
         return f"Expected URL to contain a 'database'; got {self.url}"
 
 
 @dataclass(kw_only=True, slots=True)
-class
+class _ExtractURLHostError(ExtractURLError):
     @override
     def __str__(self) -> str:
         return f"Expected URL to contain a 'host'; got {self.url}"
 
 
 @dataclass(kw_only=True, slots=True)
-class
+class _ExtractURLPortError(ExtractURLError):
     @override
     def __str__(self) -> str:
         return f"Expected URL to contain a 'port'; got {self.url}"
 
 
-##
-
-
 def _get_table_name(obj: TableOrORMInstOrClass | str, /) -> str:
-    """Get the table name from a Table or mapped class."""
     match obj:
         case Table() | DeclarativeBase() | type() as table_or_orm:
             return get_table_name(table_or_orm)
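`_extract_url` consolidates the URL validation that the old `PGDump*` and `PGRestore*` error hierarchies duplicated; because the command builders run before the `dry_run` branch, an incomplete URL now fails fast even on a dry run. A hedged sketch of how that surfaces, again assuming the `url` is a SQLAlchemy `URL` with illustrative values:

    import asyncio

    from sqlalchemy import URL  # assumed to be the URL type the module expects

    from utilities.postgres import ExtractURLError, pg_dump

    # No port on purpose: _extract_url should reject this before any command runs.
    url = URL.create("postgresql", username="postgres", host="localhost", database="mydb")

    try:
        asyncio.run(pg_dump(url, "backups/mydb", dry_run=True))
    except ExtractURLError as error:
        print(error)  # Expected URL to contain a 'port'; got postgresql://...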
@@ -283,4 +388,8 @@ def _get_table_name(obj: TableOrORMInstOrClass | str, /) -> str:
             assert_never(never)
 
 
-
+def _wrap_docker(parts: list[str], container: str, /) -> list[str]:
+    return ["docker", "exec", "-it", container, *parts]
+
+
+__all__ = ["ExtractURLError", "pg_dump", "restore"]
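The repeated `docker exec` prefixing is now factored into `_wrap_docker`, which the dump, restore, and psql builders all call when a container name is given. Its effect on a command list, shown with an illustrative container name:

    def wrap_docker(parts: list[str], container: str) -> list[str]:
        # Same shape as _wrap_docker: prefix the command with `docker exec -it <container>`.
        return ["docker", "exec", "-it", container, *parts]


    print(" ".join(wrap_docker(["pg_dump", "--dbname=mydb", "--no-password"], "db")))
    # docker exec -it db pg_dump --dbname=mydb --no-password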
{dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/WHEEL: File without changes
{dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/entry_points.txt: File without changes
{dycw_utilities-0.148.5.dist-info → dycw_utilities-0.149.1.dist-info}/licenses/LICENSE: File without changes