dycw-utilities 0.146.2__py3-none-any.whl → 0.178.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dycw-utilities might be problematic; see the registry's advisory page for more details.

Files changed (89):
  1. dycw_utilities-0.178.1.dist-info/METADATA +34 -0
  2. dycw_utilities-0.178.1.dist-info/RECORD +105 -0
  3. dycw_utilities-0.178.1.dist-info/WHEEL +4 -0
  4. {dycw_utilities-0.146.2.dist-info → dycw_utilities-0.178.1.dist-info}/entry_points.txt +1 -0
  5. utilities/__init__.py +1 -1
  6. utilities/altair.py +10 -7
  7. utilities/asyncio.py +129 -50
  8. utilities/atomicwrites.py +1 -1
  9. utilities/atools.py +64 -4
  10. utilities/cachetools.py +9 -6
  11. utilities/click.py +144 -49
  12. utilities/concurrent.py +1 -1
  13. utilities/contextlib.py +4 -2
  14. utilities/contextvars.py +20 -1
  15. utilities/cryptography.py +3 -3
  16. utilities/dataclasses.py +15 -28
  17. utilities/docker.py +387 -0
  18. utilities/enum.py +2 -2
  19. utilities/errors.py +17 -3
  20. utilities/fastapi.py +8 -3
  21. utilities/fpdf2.py +2 -2
  22. utilities/functions.py +20 -297
  23. utilities/git.py +19 -0
  24. utilities/grp.py +28 -0
  25. utilities/hypothesis.py +361 -79
  26. utilities/importlib.py +17 -1
  27. utilities/inflect.py +1 -1
  28. utilities/iterables.py +33 -58
  29. utilities/jinja2.py +148 -0
  30. utilities/json.py +1 -1
  31. utilities/libcst.py +7 -7
  32. utilities/logging.py +131 -93
  33. utilities/math.py +8 -4
  34. utilities/more_itertools.py +4 -6
  35. utilities/operator.py +1 -1
  36. utilities/orjson.py +86 -34
  37. utilities/os.py +49 -2
  38. utilities/packaging.py +115 -0
  39. utilities/parse.py +2 -2
  40. utilities/pathlib.py +66 -34
  41. utilities/permissions.py +298 -0
  42. utilities/platform.py +5 -4
  43. utilities/polars.py +934 -420
  44. utilities/polars_ols.py +1 -1
  45. utilities/postgres.py +317 -153
  46. utilities/pottery.py +10 -86
  47. utilities/pqdm.py +3 -3
  48. utilities/pwd.py +28 -0
  49. utilities/pydantic.py +4 -51
  50. utilities/pydantic_settings.py +240 -0
  51. utilities/pydantic_settings_sops.py +76 -0
  52. utilities/pyinstrument.py +5 -5
  53. utilities/pytest.py +100 -126
  54. utilities/pytest_plugins/pytest_randomly.py +1 -1
  55. utilities/pytest_plugins/pytest_regressions.py +7 -3
  56. utilities/pytest_regressions.py +27 -8
  57. utilities/random.py +11 -6
  58. utilities/re.py +1 -1
  59. utilities/redis.py +101 -64
  60. utilities/sentinel.py +10 -0
  61. utilities/shelve.py +4 -1
  62. utilities/shutil.py +25 -0
  63. utilities/slack_sdk.py +9 -4
  64. utilities/sqlalchemy.py +422 -352
  65. utilities/sqlalchemy_polars.py +28 -52
  66. utilities/string.py +1 -1
  67. utilities/subprocess.py +1977 -0
  68. utilities/tempfile.py +112 -4
  69. utilities/testbook.py +50 -0
  70. utilities/text.py +174 -42
  71. utilities/throttle.py +158 -0
  72. utilities/timer.py +2 -2
  73. utilities/traceback.py +59 -38
  74. utilities/types.py +68 -22
  75. utilities/typing.py +479 -19
  76. utilities/uuid.py +42 -5
  77. utilities/version.py +27 -26
  78. utilities/whenever.py +663 -178
  79. utilities/zoneinfo.py +80 -22
  80. dycw_utilities-0.146.2.dist-info/METADATA +0 -41
  81. dycw_utilities-0.146.2.dist-info/RECORD +0 -99
  82. dycw_utilities-0.146.2.dist-info/WHEEL +0 -4
  83. dycw_utilities-0.146.2.dist-info/licenses/LICENSE +0 -21
  84. utilities/aiolimiter.py +0 -25
  85. utilities/eventkit.py +0 -388
  86. utilities/period.py +0 -237
  87. utilities/python_dotenv.py +0 -101
  88. utilities/streamlit.py +0 -105
  89. utilities/typed_settings.py +0 -144
utilities/polars_ols.py CHANGED
@@ -6,8 +6,8 @@ from polars import Expr, Series, struct
6
6
  from polars_ols import RollingKwargs, compute_rolling_least_squares
7
7
 
8
8
  from utilities.errors import ImpossibleCaseError
9
- from utilities.functions import is_sequence_of
10
9
  from utilities.polars import concat_series, ensure_expr_or_series
10
+ from utilities.typing import is_sequence_of
11
11
 
12
12
  if TYPE_CHECKING:
13
13
  from polars._typing import IntoExprColumn
utilities/postgres.py CHANGED
@@ -5,18 +5,29 @@ from pathlib import Path
5
5
  from shutil import rmtree
6
6
  from typing import TYPE_CHECKING, Literal, assert_never, override
7
7
 
8
+ from sqlalchemy import Table
9
+ from sqlalchemy.orm import DeclarativeBase
10
+
8
11
  from utilities.asyncio import stream_command
12
+ from utilities.docker import docker_exec_cmd
9
13
  from utilities.iterables import always_iterable
10
- from utilities.logging import get_logger
14
+ from utilities.logging import to_logger
11
15
  from utilities.os import temp_environ
12
- from utilities.sqlalchemy import TableOrORMInstOrClass, get_table_name
16
+ from utilities.pathlib import ensure_suffix
17
+ from utilities.sqlalchemy import extract_url, get_table_name
13
18
  from utilities.timer import Timer
14
19
  from utilities.types import PathLike
15
20
 
16
21
  if TYPE_CHECKING:
17
22
  from sqlalchemy import URL
18
23
 
19
- from utilities.types import LoggerOrName, MaybeListStr, MaybeSequence, PathLike
24
+ from utilities.sqlalchemy import TableOrORMInstOrClass
25
+ from utilities.types import (
26
+ LoggerLike,
27
+ MaybeCollection,
28
+ MaybeCollectionStr,
29
+ PathLike,
30
+ )
20
31
 
21
32
 
22
33
  type _PGDumpFormat = Literal["plain", "custom", "directory", "tar"]
@@ -27,218 +38,371 @@ async def pg_dump(
27
38
  path: PathLike,
28
39
  /,
29
40
  *,
41
+ docker_container: str | None = None,
30
42
  format_: _PGDumpFormat = "plain",
31
43
  jobs: int | None = None,
32
- schemas: MaybeListStr | None = None,
33
- tables: MaybeSequence[TableOrORMInstOrClass] | None = None,
34
- logger: LoggerOrName | None = None,
44
+ data_only: bool = False,
45
+ clean: bool = False,
46
+ create: bool = False,
47
+ extension: MaybeCollectionStr | None = None,
48
+ extension_exc: MaybeCollectionStr | None = None,
49
+ schema: MaybeCollectionStr | None = None,
50
+ schema_exc: MaybeCollectionStr | None = None,
51
+ table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
52
+ table_exc: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
53
+ inserts: bool = False,
54
+ on_conflict_do_nothing: bool = False,
55
+ role: str | None = None,
35
56
  dry_run: bool = False,
36
- ) -> None:
57
+ logger: LoggerLike | None = None,
58
+ ) -> bool:
37
59
  """Run `pg_dump`."""
38
- path = Path(path)
60
+ path = _path_pg_dump(path, format_=format_)
39
61
  path.parent.mkdir(parents=True, exist_ok=True)
40
- if url.database is None:
41
- raise _PGDumpDatabaseError(url=url)
42
- if url.host is None:
43
- raise _PGDumpHostError(url=url)
44
- if url.port is None:
45
- raise _PGDumpPortError(url=url)
46
- parts: list[str] = [
47
- "pg_dump",
62
+ cmd = _build_pg_dump(
63
+ url,
64
+ path,
65
+ docker_container=docker_container,
66
+ format_=format_,
67
+ jobs=jobs,
68
+ data_only=data_only,
69
+ clean=clean,
70
+ create=create,
71
+ extension=extension,
72
+ extension_exc=extension_exc,
73
+ schema=schema,
74
+ schema_exc=schema_exc,
75
+ table=table,
76
+ table_exc=table_exc,
77
+ inserts=inserts,
78
+ on_conflict_do_nothing=on_conflict_do_nothing,
79
+ role=role,
80
+ )
81
+ if dry_run:
82
+ if logger is not None:
83
+ to_logger(logger).info("Would run:\n\t%r", str(cmd))
84
+ return True
85
+ with temp_environ(PGPASSWORD=url.password), Timer() as timer: # pragma: no cover
86
+ try:
87
+ output = await stream_command(cmd)
88
+ except KeyboardInterrupt:
89
+ if logger is not None:
90
+ to_logger(logger).info(
91
+ "Cancelled backup to %r after %s", str(path), timer
92
+ )
93
+ rmtree(path, ignore_errors=True)
94
+ return False
95
+ if output.return_code != 0:
96
+ if logger is not None:
97
+ to_logger(logger).exception(
98
+ "Backup to %r failed after %s\nstderr:\n%s",
99
+ str(path),
100
+ timer,
101
+ output.stderr,
102
+ )
103
+ rmtree(path, ignore_errors=True)
104
+ return False
105
+ if logger is not None: # pragma: no cover
106
+ to_logger(logger).info("Backup to %r finished after %s", str(path), timer)
107
+ return True # pragma: no cover
108
+
109
+
110
+ def _build_pg_dump(
111
+ url: URL,
112
+ path: PathLike,
113
+ /,
114
+ *,
115
+ docker_container: str | None = None,
116
+ format_: _PGDumpFormat = "plain",
117
+ jobs: int | None = None,
118
+ data_only: bool = False,
119
+ clean: bool = False,
120
+ create: bool = False,
121
+ extension: MaybeCollectionStr | None = None,
122
+ extension_exc: MaybeCollectionStr | None = None,
123
+ schema: MaybeCollectionStr | None = None,
124
+ schema_exc: MaybeCollectionStr | None = None,
125
+ table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
126
+ table_exc: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
127
+ inserts: bool = False,
128
+ on_conflict_do_nothing: bool = False,
129
+ role: str | None = None,
130
+ ) -> str:
131
+ extracted = extract_url(url)
132
+ path = _path_pg_dump(path, format_=format_)
133
+ parts: list[str] = ["pg_dump"]
134
+ if docker_container is not None:
135
+ parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
136
+ parts.extend([
48
137
  # general options
49
- f"--dbname={url.database}",
50
138
  f"--file={str(path)!r}",
51
139
  f"--format={format_}",
52
140
  "--verbose",
53
141
  # output options
142
+ *_resolve_data_only_and_clean(data_only=data_only, clean=clean),
54
143
  "--large-objects",
55
- "--clean",
56
144
  "--no-owner",
57
145
  "--no-privileges",
58
- "--if-exists",
59
146
  # connection options
60
- f"--host={url.host}",
61
- f"--port={url.port}",
147
+ f"--dbname={extracted.database}",
148
+ f"--host={extracted.host}",
149
+ f"--port={extracted.port}",
150
+ f"--username={extracted.username}",
62
151
  "--no-password",
63
- ]
152
+ ])
64
153
  if (format_ == "directory") and (jobs is not None):
65
154
  parts.append(f"--jobs={jobs}")
66
- if schemas is not None:
67
- parts.extend([f"--schema={s}" for s in always_iterable(schemas)])
68
- if tables is not None:
69
- parts.extend([f"--table={get_table_name(t)}" for t in always_iterable(tables)])
70
- if url.username is not None:
71
- parts.append(f"--username={url.username}")
72
- cmd = " ".join(parts)
155
+ if create:
156
+ parts.append("--create")
157
+ if extension is not None:
158
+ parts.extend([f"--extension={e}" for e in always_iterable(extension)])
159
+ if extension_exc is not None:
160
+ parts.extend([
161
+ f"--exclude-extension={e}" for e in always_iterable(extension_exc)
162
+ ])
163
+ if schema is not None:
164
+ parts.extend([f"--schema={s}" for s in always_iterable(schema)])
165
+ if schema_exc is not None:
166
+ parts.extend([f"--exclude-schema={s}" for s in always_iterable(schema_exc)])
167
+ if table is not None:
168
+ parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(table)])
169
+ if table_exc is not None:
170
+ parts.extend([
171
+ f"--exclude-table={_get_table_name(t)}" for t in always_iterable(table_exc)
172
+ ])
173
+ if inserts:
174
+ parts.append("--inserts")
175
+ if on_conflict_do_nothing:
176
+ parts.append("--on-conflict-do-nothing")
177
+ if role is not None:
178
+ parts.append(f"--role={role}")
179
+ return " ".join(parts)
180
+
181
+
182
+ def _path_pg_dump(path: PathLike, /, *, format_: _PGDumpFormat = "plain") -> Path:
183
+ match format_:
184
+ case "plain":
185
+ suffix = ".sql"
186
+ case "custom":
187
+ suffix = ".pgdump"
188
+ case "directory":
189
+ suffix = None
190
+ case "tar":
191
+ suffix = ".tar"
192
+ case never:
193
+ assert_never(never)
194
+ path = Path(path)
195
+ if suffix is not None:
196
+ path = ensure_suffix(path, suffix)
197
+ return path
198
+
199
+
200
+ ##
201
+
202
+
203
+ async def restore(
204
+ url: URL,
205
+ path: PathLike,
206
+ /,
207
+ *,
208
+ psql: bool = False,
209
+ data_only: bool = False,
210
+ clean: bool = False,
211
+ create: bool = False,
212
+ jobs: int | None = None,
213
+ schema: MaybeCollectionStr | None = None,
214
+ schema_exc: MaybeCollectionStr | None = None,
215
+ table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
216
+ role: str | None = None,
217
+ docker_container: str | None = None,
218
+ dry_run: bool = False,
219
+ logger: LoggerLike | None = None,
220
+ ) -> bool:
221
+ """Run `pg_restore`/`psql`."""
222
+ cmd = _build_pg_restore_or_psql(
223
+ url,
224
+ path,
225
+ psql=psql,
226
+ data_only=data_only,
227
+ clean=clean,
228
+ create=create,
229
+ jobs=jobs,
230
+ schema=schema,
231
+ schema_exc=schema_exc,
232
+ table=table,
233
+ role=role,
234
+ docker_container=docker_container,
235
+ )
73
236
  if dry_run:
74
237
  if logger is not None:
75
- get_logger(logger=logger).info("Would run %r", str(path))
76
- return
238
+ to_logger(logger).info("Would run:\n\t%r", str(cmd))
239
+ return True
77
240
  with temp_environ(PGPASSWORD=url.password), Timer() as timer: # pragma: no cover
78
241
  try:
79
242
  output = await stream_command(cmd)
80
243
  except KeyboardInterrupt:
81
244
  if logger is not None:
82
- get_logger(logger=logger).info(
83
- "Cancelled backup to %r after %s", str(path), timer
245
+ to_logger(logger).info(
246
+ "Cancelled restore from %r after %s", str(path), timer
84
247
  )
85
- rmtree(path, ignore_errors=True)
86
- else:
87
- match output.return_code:
88
- case 0:
89
- if logger is not None:
90
- get_logger(logger=logger).info(
91
- "Backup to %r finished after %s", str(path), timer
92
- )
93
- case _:
94
- if logger is not None:
95
- get_logger(logger=logger).exception(
96
- "Backup to %r failed after %s\nstderr:\n%s",
97
- str(path),
98
- timer,
99
- output.stderr,
100
- )
101
- rmtree(path, ignore_errors=True)
102
-
103
-
104
- @dataclass(kw_only=True, slots=True)
105
- class PGDumpError(Exception):
106
- url: URL
107
-
108
-
109
- @dataclass(kw_only=True, slots=True)
110
- class _PGDumpDatabaseError(PGDumpError):
111
- @override
112
- def __str__(self) -> str:
113
- return f"Expected URL to contain a 'database'; got {self.url}"
114
-
115
-
116
- @dataclass(kw_only=True, slots=True)
117
- class _PGDumpHostError(PGDumpError):
118
- @override
119
- def __str__(self) -> str:
120
- return f"Expected URL to contain a 'host'; got {self.url}"
248
+ return False
249
+ if output.return_code != 0:
250
+ if logger is not None:
251
+ to_logger(logger).exception(
252
+ "Restore from %r failed after %s\nstderr:\n%s",
253
+ str(path),
254
+ timer,
255
+ output.stderr,
256
+ )
257
+ return False
258
+ if logger is not None: # pragma: no cover
259
+ to_logger(logger).info("Restore from %r finished after %s", str(path), timer)
260
+ return True # pragma: no cover
121
261
 
122
262
 
123
- @dataclass(kw_only=True, slots=True)
124
- class _PGDumpPortError(PGDumpError):
125
- @override
126
- def __str__(self) -> str:
127
- return f"Expected URL to contain a 'port'; got {self.url}"
263
+ ##
128
264
 
129
265
 
130
- ##
266
+ def _build_pg_restore_or_psql(
267
+ url: URL,
268
+ path: PathLike,
269
+ /,
270
+ *,
271
+ psql: bool = False,
272
+ data_only: bool = False,
273
+ clean: bool = False,
274
+ create: bool = False,
275
+ jobs: int | None = None,
276
+ schema: MaybeCollectionStr | None = None,
277
+ schema_exc: MaybeCollectionStr | None = None,
278
+ table: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
279
+ role: str | None = None,
280
+ docker_container: str | None = None,
281
+ ) -> str:
282
+ path = Path(path)
283
+ if (path.suffix == ".sql") or psql:
284
+ return _build_psql(url, path, docker_container=docker_container)
285
+ return _build_pg_restore(
286
+ url,
287
+ path,
288
+ data_only=data_only,
289
+ clean=clean,
290
+ create=create,
291
+ jobs=jobs,
292
+ schemas=schema,
293
+ schemas_exc=schema_exc,
294
+ tables=table,
295
+ role=role,
296
+ docker_container=docker_container,
297
+ )
131
298
 
132
299
 
133
- async def pg_restore(
300
+ def _build_pg_restore(
134
301
  url: URL,
135
302
  path: PathLike,
136
303
  /,
137
304
  *,
138
- database: str | None = None,
139
305
  data_only: bool = False,
306
+ clean: bool = False,
307
+ create: bool = False,
140
308
  jobs: int | None = None,
141
- schemas: MaybeListStr | None = None,
142
- tables: MaybeSequence[TableOrORMInstOrClass] | None = None,
143
- logger: LoggerOrName | None = None,
144
- dry_run: bool = False,
145
- ) -> None:
309
+ schemas: MaybeCollectionStr | None = None,
310
+ schemas_exc: MaybeCollectionStr | None = None,
311
+ tables: MaybeCollection[TableOrORMInstOrClass | str] | None = None,
312
+ role: str | None = None,
313
+ docker_container: str | None = None,
314
+ ) -> str:
146
315
  """Run `pg_restore`."""
147
- match database, url.database:
148
- case str() as database_use, _:
149
- ...
150
- case None, str() as database_use:
151
- ...
152
- case None, None:
153
- raise _PGRestoreDatabaseError(url=url)
154
- case _ as never:
155
- assert_never(never)
156
- if url.host is None:
157
- raise _PGRestoreHostError(url=url)
158
- if url.port is None:
159
- raise _PGRestorePortError(url=url)
160
- parts: list[str] = [
161
- "pg_restore",
316
+ extracted = extract_url(url)
317
+ parts: list[str] = ["pg_restore"]
318
+ if docker_container is not None:
319
+ parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
320
+ parts.extend([
162
321
  # general options
163
- f"--dbname={database_use}",
164
322
  "--verbose",
165
323
  # restore options
324
+ *_resolve_data_only_and_clean(data_only=data_only, clean=clean),
166
325
  "--exit-on-error",
167
326
  "--no-owner",
168
327
  "--no-privileges",
169
- "--if-exists",
170
328
  # connection options
171
- f"--host={url.host}",
172
- f"--port={url.port}",
329
+ f"--host={extracted.host}",
330
+ f"--port={extracted.port}",
331
+ f"--username={extracted.username}",
332
+ f"--dbname={extracted.database}",
173
333
  "--no-password",
174
- ]
175
- if data_only:
176
- parts.append("--data-only")
177
- else:
178
- parts.append("--clean")
334
+ ])
335
+ if create:
336
+ parts.append("--create")
179
337
  if jobs is not None:
180
338
  parts.append(f"--jobs={jobs}")
181
339
  if schemas is not None:
182
340
  parts.extend([f"--schema={s}" for s in always_iterable(schemas)])
341
+ if schemas_exc is not None:
342
+ parts.extend([f"--exclude-schema={s}" for s in always_iterable(schemas_exc)])
183
343
  if tables is not None:
184
- parts.extend([f"--table={get_table_name(t)}" for t in always_iterable(tables)])
185
- if url.username is not None:
186
- parts.append(f"--username={url.username}")
344
+ parts.extend([f"--table={_get_table_name(t)}" for t in always_iterable(tables)])
345
+ if role is not None:
346
+ parts.append(f"--role={role}")
187
347
  parts.append(str(path))
188
- cmd = " ".join(parts)
189
- if dry_run:
190
- if logger is not None:
191
- get_logger(logger=logger).info("Would run %r", str(path))
192
- return
193
- with temp_environ(PGPASSWORD=url.password), Timer() as timer: # pragma: no cover
194
- try:
195
- output = await stream_command(cmd)
196
- except KeyboardInterrupt:
197
- if logger is not None:
198
- get_logger(logger=logger).info(
199
- "Cancelled restore from %r after %s", str(path), timer
200
- )
201
- else:
202
- match output.return_code:
203
- case 0:
204
- if logger is not None:
205
- get_logger(logger=logger).info(
206
- "Restore from %r finished after %s", str(path), timer
207
- )
208
- case _:
209
- if logger is not None:
210
- get_logger(logger=logger).exception(
211
- "Restore from %r failed after %s\nstderr:\n%s",
212
- str(path),
213
- timer,
214
- output.stderr,
215
- )
348
+ return " ".join(parts)
216
349
 
217
350
 
218
- @dataclass(kw_only=True, slots=True)
219
- class PGRestoreError(Exception):
220
- url: URL
351
+ def _build_psql(
352
+ url: URL, path: PathLike, /, *, docker_container: str | None = None
353
+ ) -> str:
354
+ """Run `psql`."""
355
+ extracted = extract_url(url)
356
+ parts: list[str] = ["psql"]
357
+ if docker_container is not None:
358
+ parts = docker_exec_cmd(docker_container, *parts, PGPASSWORD=extracted.password)
359
+ parts.extend([
360
+ # general options
361
+ f"--dbname={extracted.database}",
362
+ f"--file={str(path)!r}",
363
+ # connection options
364
+ f"--host={extracted.host}",
365
+ f"--port={extracted.port}",
366
+ f"--username={extracted.username}",
367
+ "--no-password",
368
+ ])
369
+ return " ".join(parts)
221
370
 
222
371
 
223
- @dataclass(kw_only=True, slots=True)
224
- class _PGRestoreDatabaseError(PGRestoreError):
225
- @override
226
- def __str__(self) -> str:
227
- return f"Expected URL to contain a 'database'; got {self.url}"
372
+ ##
228
373
 
229
374
 
230
- @dataclass(kw_only=True, slots=True)
231
- class _PGRestoreHostError(PGRestoreError):
232
- @override
233
- def __str__(self) -> str:
234
- return f"Expected URL to contain a 'host'; got {self.url}"
375
+ def _get_table_name(obj: TableOrORMInstOrClass | str, /) -> str:
376
+ match obj:
377
+ case Table() | DeclarativeBase() | type() as table_or_orm:
378
+ return get_table_name(table_or_orm)
379
+ case str() as name:
380
+ return name
381
+ case never:
382
+ assert_never(never)
383
+
384
+
385
+ def _resolve_data_only_and_clean(
386
+ *, data_only: bool = False, clean: bool = False
387
+ ) -> list[str]:
388
+ match data_only, clean:
389
+ case False, False:
390
+ return []
391
+ case True, False:
392
+ return ["--data-only"]
393
+ case False, True:
394
+ return ["--clean", "--if-exists"]
395
+ case True, True:
396
+ raise _ResolveDataOnlyAndCleanError
397
+ case never:
398
+ assert_never(never)
235
399
 
236
400
 
237
401
  @dataclass(kw_only=True, slots=True)
238
- class _PGRestorePortError(PGRestoreError):
402
+ class _ResolveDataOnlyAndCleanError(Exception):
239
403
  @override
240
404
  def __str__(self) -> str:
241
- return f"Expected URL to contain a 'port'; got {self.url}"
405
+ return "Cannot use '--data-only' and '--clean' together"
242
406
 
243
407
 
244
- __all__ = ["PGDumpError", "PGRestoreError", "pg_dump", "pg_restore"]
408
+ __all__ = ["pg_dump", "restore"]