dycw-utilities 0.148.4__py3-none-any.whl → 0.174.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dycw-utilities has been flagged for review.

Files changed (83)
  1. dycw_utilities-0.174.12.dist-info/METADATA +41 -0
  2. dycw_utilities-0.174.12.dist-info/RECORD +104 -0
  3. dycw_utilities-0.174.12.dist-info/WHEEL +4 -0
  4. {dycw_utilities-0.148.4.dist-info → dycw_utilities-0.174.12.dist-info}/entry_points.txt +3 -0
  5. utilities/__init__.py +1 -1
  6. utilities/{eventkit.py → aeventkit.py} +12 -11
  7. utilities/altair.py +7 -6
  8. utilities/asyncio.py +113 -64
  9. utilities/atomicwrites.py +1 -1
  10. utilities/atools.py +64 -4
  11. utilities/cachetools.py +9 -6
  12. utilities/click.py +145 -49
  13. utilities/concurrent.py +1 -1
  14. utilities/contextlib.py +4 -2
  15. utilities/contextvars.py +20 -1
  16. utilities/cryptography.py +3 -3
  17. utilities/dataclasses.py +15 -28
  18. utilities/docker.py +292 -0
  19. utilities/enum.py +2 -2
  20. utilities/errors.py +1 -1
  21. utilities/fastapi.py +8 -3
  22. utilities/fpdf2.py +2 -2
  23. utilities/functions.py +20 -297
  24. utilities/git.py +19 -0
  25. utilities/grp.py +28 -0
  26. utilities/hypothesis.py +360 -78
  27. utilities/inflect.py +1 -1
  28. utilities/iterables.py +12 -58
  29. utilities/jinja2.py +148 -0
  30. utilities/json.py +1 -1
  31. utilities/libcst.py +7 -7
  32. utilities/logging.py +74 -85
  33. utilities/math.py +8 -4
  34. utilities/more_itertools.py +4 -6
  35. utilities/operator.py +1 -1
  36. utilities/orjson.py +86 -34
  37. utilities/os.py +49 -2
  38. utilities/parse.py +2 -2
  39. utilities/pathlib.py +66 -34
  40. utilities/permissions.py +297 -0
  41. utilities/platform.py +5 -5
  42. utilities/polars.py +932 -420
  43. utilities/polars_ols.py +1 -1
  44. utilities/postgres.py +299 -174
  45. utilities/pottery.py +8 -73
  46. utilities/pqdm.py +3 -3
  47. utilities/pwd.py +28 -0
  48. utilities/pydantic.py +11 -0
  49. utilities/pydantic_settings.py +240 -0
  50. utilities/pydantic_settings_sops.py +76 -0
  51. utilities/pyinstrument.py +5 -5
  52. utilities/pytest.py +155 -46
  53. utilities/pytest_plugins/pytest_randomly.py +1 -1
  54. utilities/pytest_plugins/pytest_regressions.py +7 -3
  55. utilities/pytest_regressions.py +2 -3
  56. utilities/random.py +11 -6
  57. utilities/re.py +1 -1
  58. utilities/redis.py +101 -64
  59. utilities/sentinel.py +10 -0
  60. utilities/shelve.py +4 -1
  61. utilities/shutil.py +25 -0
  62. utilities/slack_sdk.py +8 -3
  63. utilities/sqlalchemy.py +422 -352
  64. utilities/sqlalchemy_polars.py +28 -52
  65. utilities/string.py +1 -1
  66. utilities/subprocess.py +864 -0
  67. utilities/tempfile.py +62 -4
  68. utilities/testbook.py +50 -0
  69. utilities/text.py +165 -42
  70. utilities/timer.py +2 -2
  71. utilities/traceback.py +46 -36
  72. utilities/types.py +62 -23
  73. utilities/typing.py +479 -19
  74. utilities/uuid.py +42 -5
  75. utilities/version.py +27 -26
  76. utilities/whenever.py +661 -151
  77. utilities/zoneinfo.py +80 -22
  78. dycw_utilities-0.148.4.dist-info/METADATA +0 -41
  79. dycw_utilities-0.148.4.dist-info/RECORD +0 -95
  80. dycw_utilities-0.148.4.dist-info/WHEEL +0 -4
  81. dycw_utilities-0.148.4.dist-info/licenses/LICENSE +0 -21
  82. utilities/period.py +0 -237
  83. utilities/typed_settings.py +0 -144
utilities/polars_ols.py CHANGED
@@ -6,8 +6,8 @@ from polars import Expr, Series, struct
 from polars_ols import RollingKwargs, compute_rolling_least_squares
 
 from utilities.errors import ImpossibleCaseError
-from utilities.functions import is_sequence_of
 from utilities.polars import concat_series, ensure_expr_or_series
+from utilities.typing import is_sequence_of
 
 if TYPE_CHECKING:
     from polars._typing import IntoExprColumn
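
The only functional change here is the import path: is_sequence_of now lives in utilities.typing rather than utilities.functions. A minimal caller update, sketched under the assumption that the predicate keeps a (sequence, type) signature returning bool; the diff itself only shows the move:

    # hypothetical caller; the signature and semantics are assumed, not confirmed by this diff
    from utilities.typing import is_sequence_of  # was: from utilities.functions import is_sequence_of

    values = [1, 2, 3]
    if is_sequence_of(values, int):  # assumed semantics: every element is an int
        print(sum(values))
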
utilities/postgres.py CHANGED
@@ -9,10 +9,12 @@ from sqlalchemy import Table
 from sqlalchemy.orm import DeclarativeBase
 
 from utilities.asyncio import stream_command
+from utilities.docker import docker_exec_cmd
 from utilities.iterables import always_iterable
-from utilities.logging import get_logger
+from utilities.logging import to_logger
 from utilities.os import temp_environ
-from utilities.sqlalchemy import get_table_name
+from utilities.pathlib import ensure_suffix
+from utilities.sqlalchemy import extract_url, get_table_name
 from utilities.timer import Timer
 from utilities.types import PathLike
 
@@ -20,7 +22,12 @@ if TYPE_CHECKING:
     from sqlalchemy import URL
 
     from utilities.sqlalchemy import TableOrORMInstOrClass
-    from utilities.types import LoggerOrName, MaybeListStr, MaybeSequence, PathLike
+    from utilities.types import (
+        LoggerLike,
+        MaybeCollection,
+        MaybeCollectionStr,
+        PathLike,
+    )
 
 
 type _PGDumpFormat = Literal["plain", "custom", "directory", "tar"]
@@ -31,174 +38,302 @@ async def pg_dump(
     path: PathLike,
     /,
     *,
-    docker: str | None = None,
+    docker_container: str | None = None,
     format_: _PGDumpFormat = "plain",
     jobs: int | None = None,
-    schemas: MaybeListStr | None = None,
-    schemas_exc: MaybeListStr | None = None,
-    tables: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
-    tables_exc: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    extension: MaybeCollectionStr | None = None,
+    extension_exc: MaybeCollectionStr | None = None,
+    schema: MaybeCollectionStr | None = None,
+    schema_exc: MaybeCollectionStr | None = None,
+    table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    table_exc: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
     inserts: bool = False,
-    logger: LoggerOrName | None = None,
+    on_conflict_do_nothing: bool = False,
+    role: str | None = None,
     dry_run: bool = False,
-) -> None:
+    logger: LoggerLike | None = None,
+) -> bool:
     """Run `pg_dump`."""
-    path = Path(path)
+    path = _path_pg_dump(path, format_=format_)
     path.parent.mkdir(parents=True, exist_ok=True)
-    if url.database is None:
-        raise _PGDumpDatabaseError(url=url)
-    if url.host is None:
-        raise _PGDumpHostError(url=url)
-    if url.port is None:
-        raise _PGDumpPortError(url=url)
-    parts: list[str] = []
-    if docker is not None:
-        parts.extend(["docker", "exec", "-it", docker])
+    cmd = _build_pg_dump(
+        url,
+        path,
+        docker_container=docker_container,
+        format_=format_,
+        jobs=jobs,
+        data_only=data_only,
+        clean=clean,
+        create=create,
+        extension=extension,
+        extension_exc=extension_exc,
+        schema=schema,
+        schema_exc=schema_exc,
+        table=table,
+        table_exc=table_exc,
+        inserts=inserts,
+        on_conflict_do_nothing=on_conflict_do_nothing,
+        role=role,
+    )
+    if dry_run:
+        if logger is not None:
+            to_logger(logger).info("Would run:\n\t%r", str(cmd))
+        return True
+    with temp_environ(PGPASSWORD=url.password), Timer() as timer:  # pragma: no cover
+        try:
+            output = await stream_command(cmd)
+        except KeyboardInterrupt:
+            if logger is not None:
+                to_logger(logger).info(
+                    "Cancelled backup to %r after %s", str(path), timer
+                )
+            rmtree(path, ignore_errors=True)
+            return False
+        if output.return_code != 0:
+            if logger is not None:
+                to_logger(logger).exception(
+                    "Backup to %r failed after %s\nstderr:\n%s",
+                    str(path),
+                    timer,
+                    output.stderr,
+                )
+            rmtree(path, ignore_errors=True)
+            return False
+    if logger is not None:  # pragma: no cover
+        to_logger(logger).info("Backup to %r finished after %s", str(path), timer)
+    return True  # pragma: no cover
+
+
+def _build_pg_dump(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    docker_container: str | None = None,
+    format_: _PGDumpFormat = "plain",
+    jobs: int | None = None,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    extension: MaybeCollectionStr | None = None,
+    extension_exc: MaybeCollectionStr | None = None,
+    schema: MaybeCollectionStr | None = None,
+    schema_exc: MaybeCollectionStr | None = None,
+    table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    table_exc: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    inserts: bool = False,
+    on_conflict_do_nothing: bool = False,
+    role: str | None = None,
+) -> str:
+    extracted = extract_url(url)
+    path = _path_pg_dump(path, format_=format_)
+    parts: list[str] = ["pg_dump"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
     parts.extend([
-        "pg_dump",
         # general options
-        f"--dbname={url.database}",
         f"--file={str(path)!r}",
         f"--format={format_}",
         "--verbose",
         # output options
+        *_resolve_data_only_and_clean(data_only=data_only, clean=clean),
         "--large-objects",
-        "--clean",
         "--no-owner",
         "--no-privileges",
-        "--if-exists",
         # connection options
-        f"--host={url.host}",
-        f"--port={url.port}",
+        f"--dbname={extracted.database}",
+        f"--host={extracted.host}",
+        f"--port={extracted.port}",
+        f"--username={extracted.username}",
        "--no-password",
     ])
     if (format_ == "directory") and (jobs is not None):
         parts.append(f"--jobs={jobs}")
-    if schemas is not None:
-        parts.extend([f"--schema={s}" for s in always_iterable(schemas)])
-    if schemas_exc is not None:
-        parts.extend([f"--exclude-schema={s}" for s in always_iterable(schemas_exc)])
-    if tables is not None:
-        parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(tables)])
-    if tables_exc is not None:
+    if create:
+        parts.append("--create")
+    if extension is not None:
+        parts.extend([f"--extension={e}" for e in always_iterable(extension)])
+    if extension_exc is not None:
+        parts.extend([
+            f"--exclude-extension={e}" for e in always_iterable(extension_exc)
+        ])
+    if schema is not None:
+        parts.extend([f"--schema={s}" for s in always_iterable(schema)])
+    if schema_exc is not None:
+        parts.extend([f"--exclude-schema={s}" for s in always_iterable(schema_exc)])
+    if table is not None:
+        parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(table)])
+    if table_exc is not None:
         parts.extend([
-            f"--exclude-table={_get_table_name(t)}" for t in always_iterable(tables_exc)
+            f"--exclude-table={_get_table_name(t)}" for t in always_iterable(table_exc)
         ])
     if inserts:
         parts.append("--inserts")
-    if url.username is not None:
-        parts.append(f"--username={url.username}")
-    cmd = " ".join(parts)
+    if on_conflict_do_nothing:
+        parts.append("--on-conflict-do-nothing")
+    if role is not None:
+        parts.append(f"--role={role}")
+    return " ".join(parts)
+
+
+def _path_pg_dump(path: PathLike, /, *, format_: _PGDumpFormat = "plain") -> Path:
+    match format_:
+        case "plain":
+            suffix = ".sql"
+        case "custom":
+            suffix = ".pgdump"
+        case "directory":
+            suffix = None
+        case "tar":
+            suffix = ".tar"
+        case never:
+            assert_never(never)
+    path = Path(path)
+    if suffix is not None:
+        path = ensure_suffix(path, suffix)
+    return path
+
+
+##
+
+
+async def restore(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    psql: bool = False,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    jobs: int | None = None,
+    schema: MaybeCollectionStr | None = None,
+    schema_exc: MaybeCollectionStr | None = None,
+    table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    role: str | None = None,
+    docker_container: str | None = None,
+    dry_run: bool = False,
+    logger: LoggerLike | None = None,
+) -> bool:
+    """Run `pg_restore`/`psql`."""
+    cmd = _build_pg_restore_or_psql(
+        url,
+        path,
+        psql=psql,
+        data_only=data_only,
+        clean=clean,
+        create=create,
+        jobs=jobs,
+        schema=schema,
+        schema_exc=schema_exc,
+        table=table,
+        role=role,
+        docker_container=docker_container,
+    )
     if dry_run:
         if logger is not None:
-            get_logger(logger=logger).info("Would run %r", str(cmd))
-        return
+            to_logger(logger).info("Would run:\n\t%r", str(cmd))
+        return True
     with temp_environ(PGPASSWORD=url.password), Timer() as timer:  # pragma: no cover
         try:
             output = await stream_command(cmd)
         except KeyboardInterrupt:
             if logger is not None:
-                get_logger(logger=logger).info(
-                    "Cancelled backup to %r after %s", str(path), timer
+                to_logger(logger).info(
+                    "Cancelled restore from %r after %s", str(path), timer
                 )
-            rmtree(path, ignore_errors=True)
-        else:
-            match output.return_code:
-                case 0:
-                    if logger is not None:
-                        get_logger(logger=logger).info(
-                            "Backup to %r finished after %s", str(path), timer
-                        )
-                case _:
-                    if logger is not None:
-                        get_logger(logger=logger).exception(
-                            "Backup to %r failed after %s\nstderr:\n%s",
-                            str(path),
-                            timer,
-                            output.stderr,
-                        )
-                    rmtree(path, ignore_errors=True)
-
-
-@dataclass(kw_only=True, slots=True)
-class PGDumpError(Exception):
-    url: URL
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGDumpDatabaseError(PGDumpError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'database'; got {self.url}"
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGDumpHostError(PGDumpError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'host'; got {self.url}"
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGDumpPortError(PGDumpError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'port'; got {self.url}"
+            return False
+        if output.return_code != 0:
+            if logger is not None:
+                to_logger(logger).exception(
+                    "Restore from %r failed after %s\nstderr:\n%s",
+                    str(path),
+                    timer,
+                    output.stderr,
+                )
+            return False
+    if logger is not None:  # pragma: no cover
+        to_logger(logger).info("Restore from %r finished after %s", str(path), timer)
+    return True  # pragma: no cover
 
 
 ##
 
 
-async def pg_restore(
+def _build_pg_restore_or_psql(
     url: URL,
     path: PathLike,
     /,
     *,
-    database: str | None = None,
-    docker: str | None = None,
+    psql: bool = False,
     data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
     jobs: int | None = None,
-    schemas: MaybeListStr | None = None,
-    schemas_exc: MaybeListStr | None = None,
-    tables: MaybeSequence[TableOrORMInstOrClass | str] | None = None,
-    logger: LoggerOrName | None = None,
-    dry_run: bool = False,
-) -> None:
+    schema: MaybeCollectionStr | None = None,
+    schema_exc: MaybeCollectionStr | None = None,
+    table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    role: str | None = None,
+    docker_container: str | None = None,
+) -> str:
+    path = Path(path)
+    if (path.suffix == ".sql") or psql:
+        return _build_psql(url, path, docker_container=docker_container)
+    return _build_pg_restore(
+        url,
+        path,
+        data_only=data_only,
+        clean=clean,
+        create=create,
+        jobs=jobs,
+        schemas=schema,
+        schemas_exc=schema_exc,
+        tables=table,
+        role=role,
+        docker_container=docker_container,
+    )
+
+
+def _build_pg_restore(
+    url: URL,
+    path: PathLike,
+    /,
+    *,
+    data_only: bool = False,
+    clean: bool = False,
+    create: bool = False,
+    jobs: int | None = None,
+    schemas: MaybeCollectionStr | None = None,
+    schemas_exc: MaybeCollectionStr | None = None,
+    tables: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
+    role: str | None = None,
+    docker_container: str | None = None,
+) -> str:
     """Run `pg_restore`."""
-    match database, url.database:
-        case str() as database_use, _:
-            ...
-        case None, str() as database_use:
-            ...
-        case None, None:
-            raise _PGRestoreDatabaseError(url=url)
-        case _ as never:
-            assert_never(never)
-    if url.host is None:
-        raise _PGRestoreHostError(url=url)
-    if url.port is None:
-        raise _PGRestorePortError(url=url)
-    parts: list[str] = []
-    if docker is not None:
-        parts.extend(["docker", "exec", "-it", docker])
+    extracted = extract_url(url)
+    parts: list[str] = ["pg_restore"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
     parts.extend([
-        "pg_restore",
         # general options
-        f"--dbname={database_use}",
         "--verbose",
         # restore options
+        *_resolve_data_only_and_clean(data_only=data_only, clean=clean),
         "--exit-on-error",
         "--no-owner",
         "--no-privileges",
         # connection options
-        f"--host={url.host}",
-        f"--port={url.port}",
+        f"--host={extracted.host}",
+        f"--port={extracted.port}",
+        f"--username={extracted.username}",
+        f"--dbname={extracted.database}",
         "--no-password",
     ])
-    if data_only:
-        parts.append("--data-only")
-    else:
-        parts.extend(["--clean", "--if-exists"])
+    if create:
+        parts.append("--create")
     if jobs is not None:
         parts.append(f"--jobs={jobs}")
     if schemas is not None:
@@ -207,77 +342,67 @@ async def pg_restore(
         parts.extend([f"--exclude-schema={s}" for s in always_iterable(schemas_exc)])
     if tables is not None:
         parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(tables)])
-    if url.username is not None:
-        parts.append(f"--username={url.username}")
+    if role is not None:
+        parts.append(f"--role={role}")
     parts.append(str(path))
-    cmd = " ".join(parts)
-    if dry_run:
-        if logger is not None:
-            get_logger(logger=logger).info("Would run %r", str(cmd))
-        return
-    with temp_environ(PGPASSWORD=url.password), Timer() as timer:  # pragma: no cover
-        try:
-            output = await stream_command(cmd)
-        except KeyboardInterrupt:
-            if logger is not None:
-                get_logger(logger=logger).info(
-                    "Cancelled restore from %r after %s", str(path), timer
-                )
-        else:
-            match output.return_code:
-                case 0:
-                    if logger is not None:
-                        get_logger(logger=logger).info(
-                            "Restore from %r finished after %s", str(path), timer
-                        )
-                case _:
-                    if logger is not None:
-                        get_logger(logger=logger).exception(
-                            "Restore from %r failed after %s\nstderr:\n%s",
-                            str(path),
-                            timer,
-                            output.stderr,
-                        )
+    return " ".join(parts)
 
 
-@dataclass(kw_only=True, slots=True)
-class PGRestoreError(Exception):
-    url: URL
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGRestoreDatabaseError(PGRestoreError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'database'; got {self.url}"
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGRestoreHostError(PGRestoreError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'host'; got {self.url}"
-
-
-@dataclass(kw_only=True, slots=True)
-class _PGRestorePortError(PGRestoreError):
-    @override
-    def __str__(self) -> str:
-        return f"Expected URL to contain a 'port'; got {self.url}"
+def _build_psql(
+    url: URL, path: PathLike, /, *, docker_container: str | None = None
+) -> str:
+    """Run `psql`."""
+    extracted = extract_url(url)
+    parts: list[str] = ["psql"]
+    if docker_container is not None:
+        parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
+    parts.extend([
+        # general options
+        f"--dbname={extracted.database}",
+        f"--file={str(path)!r}",
+        # connection options
+        f"--host={extracted.host}",
+        f"--port={extracted.port}",
+        f"--username={extracted.username}",
+        "--no-password",
+    ])
+    return " ".join(parts)
 
 
 ##
 
 
 def _get_table_name(obj: TableOrORMInstOrClass | str, /) -> str:
-    """Get the table name from a Table or mapped class."""
     match obj:
         case Table() | DeclarativeBase() | type() as table_or_orm:
             return get_table_name(table_or_orm)
         case str() as name:
            return name
-        case _ as never:
+        case never:
+            assert_never(never)
+
+
+def _resolve_data_only_and_clean(
+    *, data_only: bool = False, clean: bool = False
+) -> list[str]:
+    match data_only, clean:
+        case False, False:
+            return []
+        case True, False:
+            return ["--data-only"]
+        case False, True:
+            return ["--clean", "--if-exists"]
+        case True, True:
+            raise _ResolveDataOnlyAndCleanError
+        case never:
            assert_never(never)
 
 
-__all__ = ["PGDumpError", "PGRestoreError", "pg_dump", "pg_restore"]
+@dataclass(kw_only=True, slots=True)
+class _ResolveDataOnlyAndCleanError(Exception):
+    @override
+    def __str__(self) -> str:
+        return "Cannot use '--data-only' and '--clean' together"
+
+
+__all__ = ["pg_dump", "restore"]