common-python-tasks 0.0.2__py3-none-any.whl → 0.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- common_python_tasks/__init__.py +2 -4
- common_python_tasks/__main__.py +35 -0
- common_python_tasks/compose.py +625 -0
- common_python_tasks/data/dockerfile_extensions/.gitkeep +0 -0
- common_python_tasks/data/fastapi/alembic.ini.j2 +52 -0
- common_python_tasks/data/fastapi/compose-base.yml.j2 +40 -0
- common_python_tasks/data/fastapi/compose-db-debug.yml.j2 +45 -0
- common_python_tasks/data/fastapi/compose-db.yml.j2 +80 -0
- common_python_tasks/data/fastapi/compose-debug.yml.j2 +30 -0
- common_python_tasks/data/generic/Dockerfile.deps.j2 +17 -0
- common_python_tasks/data/generic/Dockerfile.j2 +145 -0
- common_python_tasks/docker.py +454 -0
- common_python_tasks/env.py +406 -0
- common_python_tasks/git.py +268 -0
- common_python_tasks/github.py +515 -0
- common_python_tasks/project.py +298 -0
- common_python_tasks/tasks.py +1011 -516
- common_python_tasks/utils.py +315 -0
- common_python_tasks-0.0.3.dist-info/METADATA +415 -0
- common_python_tasks-0.0.3.dist-info/RECORD +27 -0
- {common_python_tasks-0.0.2.dist-info → common_python_tasks-0.0.3.dist-info}/WHEEL +1 -1
- {common_python_tasks-0.0.2.dist-info → common_python_tasks-0.0.3.dist-info}/licenses/LICENSE +1 -1
- common_python_tasks/data/Containerfile +0 -76
- common_python_tasks-0.0.2.dist-info/METADATA +0 -295
- common_python_tasks-0.0.2.dist-info/RECORD +0 -12
- /common_python_tasks/data/{.coveragerc → generic/.coveragerc} +0 -0
- /common_python_tasks/data/{.dockerignore → generic/.dockerignore} +0 -0
- /common_python_tasks/data/{.flake8 → generic/.flake8} +0 -0
- /common_python_tasks/data/{.isort.cfg → generic/.isort.cfg} +0 -0
- /common_python_tasks/data/{pytest.ini → generic/pytest.ini} +0 -0
common_python_tasks/__init__.py
CHANGED
|
@@ -1,18 +1,16 @@
|
|
|
1
1
|
from typing import TYPE_CHECKING
|
|
2
2
|
|
|
3
3
|
if TYPE_CHECKING:
|
|
4
|
-
from
|
|
4
|
+
from typing import Sequence
|
|
5
5
|
|
|
6
6
|
from poethepoet_tasks import TaskCollection
|
|
7
7
|
|
|
8
|
-
__version__ = "0.0.0"
|
|
9
|
-
|
|
10
8
|
__all__ = ["TaskCollection"]
|
|
11
9
|
|
|
12
10
|
|
|
13
11
|
def tasks(
    include_tags: "Sequence[str]" = tuple(), exclude_tags: "Sequence[str]" = tuple()
) -> dict:
    """Collect the task definitions, filtered by include/exclude tags.

    Args:
        include_tags: Tags a task must carry to be included.
        exclude_tags: Tags that exclude a task from the result.

    Returns:
        The task mapping produced by `common_python_tasks.tasks.tasks`.
    """
    # Imported lazily so importing the package stays cheap; aliased to avoid
    # shadowing this wrapper's own name inside the body.
    from .tasks import tasks as _collect_tasks

    return _collect_tasks(include_tags=include_tags, exclude_tags=exclude_tags)
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
|
|
3
|
+
__all__ = ["get_available_tasks"]
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def get_available_tasks(internal: bool = False) -> list[str]:
    """Return available task names for this package.

    Args:
        internal: When True, include internal task names starting with '_'.

    Returns:
        A list of task names.
    """
    from .tasks import tasks

    all_names = tasks()["tasks"]
    if internal:
        # No filtering requested: keep every task name, internal or not.
        return list(all_names)
    return [name for name in all_names if not name.startswith("_")]
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
if __name__ == "__main__":
    # Any extra CLI argument is treated as a misuse: warn on stderr and exit 1.
    invoked_with_args = len(sys.argv) > 1
    print(
        "common_python_tasks is not intended to be run as a standalone script. Invoke a task via poethepoet.",
        file=sys.stderr if invoked_with_args else sys.stdout,
    )

    if invoked_with_args:
        sys.exit(1)

    # Bare invocation: list the public tasks as a convenience.
    print("\nAvailable tasks in this release:\n")
    for task_name in get_available_tasks():
        print(f" - {task_name}")
|
|
@@ -0,0 +1,625 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import platform
|
|
3
|
+
import tempfile
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from shlex import quote
|
|
6
|
+
from typing import NoReturn, Sequence
|
|
7
|
+
|
|
8
|
+
from jinja2 import Template
|
|
9
|
+
from poethepoet_tasks import TaskCollection
|
|
10
|
+
|
|
11
|
+
from . import utils
|
|
12
|
+
from .env import get_workdir_path
|
|
13
|
+
from .project import get_poetry_version
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def ensure_alembic_config(compose_type: str) -> tuple[Path | None, bool]:
    """Render `alembic.ini` from the bundled template when a local copy is absent.

    Args:
        compose_type: The compose type to use when locating the bundled template.

    Returns:
        A tuple of `(path, should_cleanup)` where `path` is the rendered
        config path and `should_cleanup` indicates whether the file should be
        removed after use.
    """
    local_config = Path("alembic.ini")
    if local_config.exists():
        # A project-local alembic.ini wins; nothing to render or clean up.
        utils.LOGGER.debug("Using existing alembic.ini")
        return local_config, False

    loaded = utils.load_data_file(
        "alembic.ini.j2", type_identifier=compose_type, fatal_on_missing=False
    )
    if loaded is None:
        # No bundled template for this compose type.
        return None, False

    _, template_content = loaded
    local_config.write_text(
        Template(template_content).render(
            package_name=utils.get_package_name(use_underscores=True),
        ),
        encoding="utf-8",
    )
    utils.LOGGER.debug("Rendered bundled alembic.ini.j2 to %s", local_config)
    # The file was generated here, so the caller is responsible for cleanup.
    return local_config, True
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def render_file(
    env_var_name: str,
    local_filename: str,
    data_filename: str,
    render_template: bool = False,
    type_identifier: str = "generic",
    extra_template_vars: dict[str, str] | None = None,
    suffix: str = ".rendered",
) -> tuple[Path, bool]:
    """Render a file template with env/local/data precedence.

    Args:
        env_var_name: Environment variable name for overriding the config path.
        local_filename: Local filename to check before falling back to bundled data.
        data_filename: Bundled data file to use when no local config exists.
        render_template: Whether to render the file as a Jinja2 template.
        type_identifier: Data directory identifier for bundled resources.
        extra_template_vars: Additional template variables to pass when rendering.
        suffix: Suffix to use for temporary rendered files.

    Returns:
        A tuple of `(Path, bool)` where the first item is the resolved file path
        and the second item indicates whether the file should be removed later.
    """
    config_path = utils.get_config_path(
        env_var_name,
        local_filename,
        data_filename,
        type_identifier=type_identifier,
    )
    if config_path is None:
        utils.fatal(f"No configuration rendered for {data_filename}")

    source = Path(config_path)
    # A `.j2` extension always triggers templating, even without the flag.
    if not (render_template or source.suffix == ".j2"):
        return source, False

    package_name = utils.get_package_name()
    default_env_prefix = (
        utils.package_name_to_underscore(package_name.upper()) if package_name else ""
    )
    template_vars: dict[str, str] = {
        "PACKAGE_NAME": package_name,
        "PACKAGE_UNDERSCORE_NAME": (
            utils.package_name_to_underscore(package_name) if package_name else ""
        ),
        "ENV_PREFIX": os.getenv("ENV_PREFIX", default_env_prefix),
    }
    template_vars.update(extra_template_vars or {})

    rendered = Template(source.read_text()).render(**template_vars)
    # delete=False: the rendered file must outlive this function; the caller
    # removes it later (signalled by the True in the return value).
    with tempfile.NamedTemporaryFile(
        mode="w",
        encoding="utf-8",
        delete=False,
        prefix=source.stem + ".",
        suffix=suffix,
    ) as handle:
        handle.write(rendered)
        rendered_path = Path(handle.name)
    return rendered_path, True
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def read_dotenv(path: Path, error_on_fail: bool = False) -> dict[str, str]:
    """Parse a simple `.env` file into a `dict` (`KEY=VALUE`; ignore comments).

    Args:
        path: Path to the `.env` file.
        error_on_fail: Whether to raise a fatal error if parsing fails.

    Returns:
        A dictionary of environment variables parsed from the file.
    """
    parsed: dict[str, str] = {}
    if not path.exists():
        return parsed
    try:
        for raw_line in path.read_text(encoding="utf-8").splitlines():
            stripped = raw_line.strip()
            # Skip blanks, comments, and lines without an assignment.
            if not stripped or stripped.startswith("#") or "=" not in stripped:
                continue
            key, _, value = stripped.partition("=")
            key = key.strip()
            if not key:
                continue
            # Trim surrounding whitespace and any quote characters.
            parsed[key] = value.strip().strip('"').strip("'")
    except Exception as exc:
        if error_on_fail:
            utils.fatal(f"Failed to parse .env file {path}: {exc}")
        else:
            utils.LOGGER.debug("Failed to parse .env file %s: %s", path, exc)
    return parsed
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
def append_dotenv(path: Path, items: dict[str, str]) -> None:
    """Append key/value pairs to `.env` with a generated header comment.

    Args:
        path: Path to the `.env` file.
        items: Environment variables to append to the file.
    """
    from datetime import datetime

    timestamp = datetime.now().isoformat(timespec="seconds")
    lines = [f"\n# Auto-generated by common_python_tasks on {timestamp}\n"]
    lines.extend(f"{key}={value}\n" for key, value in items.items())
    # Append mode: never clobber an existing .env, only add to it.
    with path.open("a", encoding="utf-8") as handle:
        handle.writelines(lines)
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def get_or_generate_secret(
    key_name: str, length_bytes: int = 32, set_in_env: bool = True
) -> str:
    """Get an env var or generate, store in `.env`, and return it.

    Args:
        key_name: The environment variable key to read or generate.
        length_bytes: Number of random bytes to use when generating a secret.
        set_in_env: Whether to populate the secret into `os.environ`.

    Returns:
        The existing or generated secret value.

    Notes:
        - Respects already-set environment variables.
        - If not set, checks `.env` for an existing value.
        - Otherwise generates with `secrets.token_hex(length_bytes)`, appends to `.env`,
          logs at `INFO`, and returns the value.
    """
    import secrets

    # Precedence 1: a value already present in the process environment.
    from_env = os.getenv(key_name)
    if from_env:
        return from_env

    # Precedence 2: a value persisted in the local .env file.
    dotenv_path = Path(".env")
    from_file = read_dotenv(dotenv_path).get(key_name)
    if from_file:
        if set_in_env:
            os.environ[key_name] = from_file
        return from_file

    # Precedence 3: generate a fresh secret and try to persist it.
    token = secrets.token_hex(length_bytes)
    try:
        append_dotenv(dotenv_path, {key_name: token})
    except Exception as exc:
        # Best effort: an unwritable .env should not block the caller.
        utils.LOGGER.warning(
            "Failed to persist %s to .env (%s); using in-memory only", key_name, exc
        )
    else:
        utils.LOGGER.info("Generated %s and stored it in .env", key_name)
    if set_in_env:
        os.environ[key_name] = token
    return token
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def ensure_secrets_generated() -> None:
    """Ensure required secrets exist (generate once and persist to `.env`)."""
    for secret_name in ("SECRET_KEY", "DB_PASS"):
        get_or_generate_secret(secret_name)
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
# Per-compose-type map of compose file base name (e.g. "compose-db") to the
# set of environment variable names that file needs. Consulted by
# get_required_vars_for_files() so get_compose_env() only forwards the
# variables relevant to the compose files actually in use.
_COMPOSE_VAR_REQUIREMENTS: dict[str, dict[str, set[str]]] = {
    "fastapi": {
        # Base stack definition.
        "compose-base": {
            "PACKAGE_NAME",
            "PACKAGE_UNDERSCORE_NAME",
            "API_PORT",
            "SECRET_KEY",
            "ENVIRONMENT",
            "IMAGE_TAG",
            "PYTHON_VERSION",
            "POETRY_VERSION",
        },
        # Database addon (presumably Postgres, given POSTGRES_VERSION).
        "compose-db": {
            "PACKAGE_NAME",
            "DB_BASE",
            "DB_USER",
            "DB_PASS",
            "DB_PORT",
            "IMAGE_TAG",
            "PYTHON_VERSION",
            "POETRY_VERSION",
            "POSTGRES_VERSION",
            "WORKDIR_PATH",
        },
        # Debug overlay for the base stack.
        "compose-debug": {
            "PACKAGE_NAME",
            "PACKAGE_UNDERSCORE_NAME",
            "IMAGE_TAG",
            "DEBUG_PORT",
        },
        # Debug overlay for the db addon (ADMINER_PORT suggests an Adminer UI).
        "compose-db-debug": {
            "PACKAGE_NAME",
            "DB_BASE",
            "DB_USER",
            "DB_PASS",
            "ADMINER_PORT",
        },
    }
}
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def get_required_vars_for_files(
    compose_type: str, compose_files: list[Path]
) -> set[str]:
    """Determine required environment variables based on compose files being used.

    Args:
        compose_type: The compose type (e.g., `fastapi`).
        compose_files: List of compose file paths.

    Returns:
        Set of environment variable names needed for the given files.
    """
    requirements = _COMPOSE_VAR_REQUIREMENTS.get(compose_type, {})

    def base_name_of(path: Path) -> str:
        # Drop a trailing .yml/.yaml extension, then collapse dotted suffixes
        # (e.g. "compose-base.rendered" -> "compose-base") when the final
        # segment looks like a plain identifier.
        name = path.name
        for extension in (".yml", ".yaml"):
            if name.endswith(extension):
                name = name.removesuffix(extension)
                break
        segments = name.split(".")
        if len(segments) > 1 and segments[-1].replace("_", "").replace("-", "").isalnum():
            return segments[0]
        return name

    needed: set[str] = set()
    for compose_file in compose_files:
        needed |= requirements.get(base_name_of(compose_file), set())
    return needed
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
def get_compose_env(
    image_tag: str | None = None,
    compose_type: str | None = None,
    compose_files: list[Path] | None = None,
) -> dict[str, str]:
    """Get environment variables for `docker compose`.

    Only includes variables required by the compose files being used,
    plus all current OS environment variables for pass-through.

    Args:
        image_tag: Docker image tag to use.
        compose_type: The compose type (e.g., `fastapi`) for variable filtering.
        compose_files: List of compose file paths for variable filtering.

    Returns:
        `dict` of environment variables for `docker compose`.
    """
    package_name = utils.get_package_name()

    candidate_vars: dict[str, str] = {
        "ADMINER_PORT": os.getenv("ADMINER_PORT", "8081"),
        "API_PORT": os.getenv("API_PORT", "8080"),
        "DB_BASE": os.getenv("DB_BASE", package_name),
        "DB_PASS": os.getenv("DB_PASS", ""),
        "DB_PORT": os.getenv("DB_PORT", "5432"),
        "DB_USER": os.getenv("DB_USER", package_name),
        "DEBUG_PORT": os.getenv("DEBUG_PORT", "5678"),
        "ENVIRONMENT": os.getenv("ENVIRONMENT", "production"),
        "IMAGE_TAG": image_tag or "latest",
        "PACKAGE_NAME": package_name,
        "PACKAGE_UNDERSCORE_NAME": utils.package_name_to_underscore(package_name),
        "WORKDIR_PATH": get_workdir_path(),
        "POETRY_VERSION": get_poetry_version(),
        "POSTGRES_VERSION": os.getenv("POSTGRES_VERSION", "17"),
        "PYTHON_VERSION": platform.python_version(),
        "SECRET_KEY": os.getenv("SECRET_KEY", ""),
    }

    # When both filters are supplied, only forward the variables the selected
    # compose files actually require; otherwise pass everything through.
    if compose_type and compose_files:
        needed = get_required_vars_for_files(compose_type, compose_files)
        candidate_vars = {
            name: value for name, value in candidate_vars.items() if name in needed
        }

    # Base environment first, then COMPOSE_MENU, then our vars (highest priority).
    merged = dict(os.environ)
    merged["COMPOSE_MENU"] = "false"
    merged.update(candidate_vars)
    return merged
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
def get_compose_type() -> str:
    """Get the compose type from environment variable.

    Returns:
        The compose type string from `COMPOSE_TYPE`, defaulting to `fastapi`.
    """
    compose_type = os.environ.get("COMPOSE_TYPE")
    return "fastapi" if compose_type is None else compose_type
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def load_compose_files(
    debug: bool = False,
) -> tuple[list[Path], list[Path], list[Path]]:
    """Load and resolve compose files.

    Args:
        debug: Whether to include debug compose files in the result.

    Returns:
        A 3-tuple of `(compose_files, temp_compose_files, temp_config_files)`.

        *temp_compose_files* are rendered YAML files consumed only by
        `docker compose` itself and may be removed once the command finishes
        parsing them.

        *temp_config_files* are files referenced by compose `configs:` blocks
        (e.g. `alembic.ini`) that are bind-mounted into running containers.
        They must persist for the lifetime of the stack and should only be
        cleaned up **after** `docker compose down`.
    """

    # An explicit COMPOSE_FILE env var short-circuits all rendering.
    compose_files_env = os.getenv("COMPOSE_FILE")
    if compose_files_env:
        utils.LOGGER.debug(
            "Using compose files from environment variable COMPOSE_FILE: %s",
            compose_files_env,
        )
        return (
            [Path(f.strip()) for f in compose_files_env.split(":") if f.strip()],
            [],
            [],
        )

    # Use get_compose_type() so an unset COMPOSE_TYPE falls back to "fastapi"
    # instead of raising KeyError (os.environ["COMPOSE_TYPE"] did).
    compose_type = get_compose_type()
    compose_addons_str = os.getenv("COMPOSE_ADDONS", "")
    compose_addons = [a.strip() for a in compose_addons_str.split(":") if a.strip()]

    utils.LOGGER.debug(
        "Loading compose files for type '%s' with addons: %s%s",
        compose_type,
        compose_addons if compose_addons else "none",
        " (debug mode)" if debug else "",
    )

    # Each entry is (path, should_cleanup) as returned by render_file().
    files_and_cleanups = [
        render_file(
            f"{compose_type.upper()}_COMPOSE_BASE",
            "compose-base.yml",
            "compose-base.yml.j2",
            render_template=True,
            type_identifier=compose_type,
            suffix=".yml",
        )
    ]

    temp_config_files: list[Path] = []
    addon_template_vars: dict[str, dict[str, str]] = {}

    # The db addon (fastapi only) needs an alembic.ini bind-mounted into the
    # container, so resolve/render it before rendering the addon template.
    if "db" in compose_addons and compose_type == "fastapi":
        alembic_path, alembic_cleanup = ensure_alembic_config(compose_type)
        if alembic_path:
            addon_template_vars["db"] = {
                "alembic_config_path": str(alembic_path.resolve()),
            }
            if alembic_cleanup:
                temp_config_files.append(alembic_path)

    for addon in compose_addons:
        files_and_cleanups.append(
            render_file(
                f"{compose_type.upper()}_COMPOSE_{addon.upper()}",
                f"compose-{addon}.yml",
                f"compose-{addon}.yml.j2",
                render_template=True,
                type_identifier=compose_type,
                extra_template_vars=addon_template_vars.get(addon),
                suffix=".yml",
            )
        )

    if debug:
        files_and_cleanups.append(
            render_file(
                f"{compose_type.upper()}_COMPOSE_DEBUG",
                "compose-debug.yml",
                "compose-debug.yml.j2",
                render_template=True,
                type_identifier=compose_type,
                suffix=".yml",
            )
        )
        # Each addon may ship its own debug overlay as well.
        for addon in compose_addons:
            files_and_cleanups.append(
                render_file(
                    f"{compose_type.upper()}_COMPOSE_{addon.upper()}_DEBUG",
                    f"compose-{addon}-debug.yml",
                    f"compose-{addon}-debug.yml.j2",
                    render_template=True,
                    type_identifier=compose_type,
                    suffix=".yml",
                )
            )

    # User-supplied overlays come last so they take highest precedence and
    # are never cleaned up by us.
    overlay_files_str = os.getenv("COMPOSE_OVERLAY_FILES", "")
    if overlay_files_str:
        overlay_files = [f.strip() for f in overlay_files_str.split(":") if f.strip()]
        utils.LOGGER.debug("Adding overlay compose files: %s", overlay_files)
        for overlay_file in overlay_files:
            files_and_cleanups.append((Path(overlay_file), False))

    compose_files = [path for path, _ in files_and_cleanups]
    temp_compose_files = [path for path, cleanup in files_and_cleanups if cleanup]

    return compose_files, temp_compose_files, temp_config_files
|
|
465
|
+
|
|
466
|
+
|
|
467
|
+
def load_and_prepare_compose(
    debug: bool = False,
    image_tag: str | None = None,
) -> tuple[list[Path], list[Path], list[Path], dict[str, str]]:
    """Load compose files and prepare environment variables.

    Args:
        debug: Whether to include debug compose files.
        image_tag: Optional Docker image tag to include in compose environment.

    Returns:
        A 4-tuple of `(compose_files, temp_compose_files, temp_config_files, compose_env)`.
    """
    files, temp_files, temp_configs = load_compose_files(debug=debug)
    env = get_compose_env(
        image_tag=image_tag,
        compose_type=get_compose_type(),
        compose_files=files,
    )
    return files, temp_files, temp_configs, env
|
|
488
|
+
|
|
489
|
+
|
|
490
|
+
def run_docker_compose_command(
    *args: str | None,
    compose_files: list[Path],
    compose_env: dict[str, str],
) -> None:
    """Run `docker compose` with standard file and env-file arguments.

    Python has issues running interactive `docker compose` commands directly
    (e.g. `up`) because of how it handles subprocess I/O and signals, so for
    those consider using `build_exec_script` instead.

    Args:
        *args: Command arguments (None values are filtered out).
        compose_files: List of compose file paths.
        compose_env: Environment variables for the command.
    """
    # Drop placeholder None arguments so callers can pass optional flags.
    command_args = [arg for arg in args if arg is not None]
    utils.run_command(
        [*compose_cmd_prefix(compose_files), *command_args],
        env=compose_env,
    )
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
def compose_cmd_prefix(
    compose_files: list[Path], tasks: "TaskCollection | None" = None
) -> Sequence[str]:
    """Build the common `docker compose` command prefix with file and env-file flags.

    Args:
        compose_files: List of compose file paths to include.
        tasks: Optional Poe TaskCollection containing envfile paths. Made
            optional because `run_docker_compose_command` invokes this helper
            with only `compose_files`; when omitted, no `--env-file` flags
            are emitted.

    Returns:
        The full docker compose command prefix as a sequence of arguments.
    """
    prefix = [
        "docker",
        "compose",
        "--project-directory",
        str(Path.cwd()),
        *[item for f in compose_files for item in ("-f", str(f))],
    ]
    if tasks is not None:
        prefix.extend(
            item for env_file in tasks.envfile for item in ("--env-file", str(env_file))
        )
    return prefix
|
|
536
|
+
|
|
537
|
+
|
|
538
|
+
def cleanup_temp_files(*file_lists: list[Path]) -> None:
    """Remove temporary files, ignoring files that are already gone.

    Args:
        *file_lists: One or more lists of temporary file paths to remove.
    """
    # Flatten all lists and delete each path; a missing file is not an error.
    for temp_file in (path for file_list in file_lists for path in file_list):
        try:
            temp_file.unlink()
        except FileNotFoundError:
            pass
|
|
550
|
+
|
|
551
|
+
|
|
552
|
+
def build_exec_script(
|
|
553
|
+
command: Sequence[str | Path],
|
|
554
|
+
cleanup_paths: list[Path] | None = None,
|
|
555
|
+
teardown_command: Sequence[str | Path] | None = None,
|
|
556
|
+
) -> Path:
|
|
557
|
+
"""Build a self-deleting shell script that runs a command with cleanup.
|
|
558
|
+
|
|
559
|
+
The generated script:
|
|
560
|
+
|
|
561
|
+
- Self-deletes via a `trap` on `EXIT`
|
|
562
|
+
- Removes `cleanup_paths` after the command exits
|
|
563
|
+
- Optionally runs a `teardown_command` (e.g. `docker compose rm`)
|
|
564
|
+
after the main command finishes (you could also use this to re-enter a task)
|
|
565
|
+
|
|
566
|
+
Args:
|
|
567
|
+
command: The command to run as the main process.
|
|
568
|
+
cleanup_paths: Temporary files to delete after the command exits.
|
|
569
|
+
teardown_command: An optional shell command (as an arg list) to run
|
|
570
|
+
after the main command exits, e.g.
|
|
571
|
+
`['docker', 'compose', ..., 'rm', '-f', '-s', '-v']`.
|
|
572
|
+
|
|
573
|
+
Returns:
|
|
574
|
+
Absolute path to the temporary script file.
|
|
575
|
+
"""
|
|
576
|
+
script_fd = tempfile.NamedTemporaryFile(
|
|
577
|
+
mode="w",
|
|
578
|
+
encoding="utf-8",
|
|
579
|
+
delete=False,
|
|
580
|
+
prefix="compose-exec.",
|
|
581
|
+
suffix=".sh",
|
|
582
|
+
)
|
|
583
|
+
script_path = Path(script_fd.name)
|
|
584
|
+
script_path_str = str(script_path.resolve())
|
|
585
|
+
|
|
586
|
+
lines = [
|
|
587
|
+
"#!/bin/sh",
|
|
588
|
+
f"SCRIPT_PATH={quote(script_path_str)}",
|
|
589
|
+
"trap 'rm -f \"$SCRIPT_PATH\"' EXIT",
|
|
590
|
+
"",
|
|
591
|
+
" ".join(quote(str(arg)) for arg in command),
|
|
592
|
+
]
|
|
593
|
+
|
|
594
|
+
if teardown_command:
|
|
595
|
+
lines.append("")
|
|
596
|
+
lines.append("# Teardown: remove containers/volumes")
|
|
597
|
+
lines.append(" ".join(quote(str(arg)) for arg in teardown_command))
|
|
598
|
+
|
|
599
|
+
if cleanup_paths:
|
|
600
|
+
lines.append("")
|
|
601
|
+
lines.append("# Clean up temporary files")
|
|
602
|
+
for path in cleanup_paths:
|
|
603
|
+
lines.append(f"rm -f {quote(str(path))}")
|
|
604
|
+
|
|
605
|
+
lines.append("")
|
|
606
|
+
|
|
607
|
+
script_path.write_text("\n".join(lines), encoding="utf-8")
|
|
608
|
+
script_path.chmod(0o700)
|
|
609
|
+
|
|
610
|
+
return script_path
|
|
611
|
+
|
|
612
|
+
|
|
613
|
+
def exec_script(script_path: Path | str, env: dict[str, str] | None = None) -> NoReturn:
    """Replace the current process with the given shell script.

    Args:
        script_path: Path to the shell script to execute.
        env: Optional environment variables to use for the executed script.

    Returns:
        This function never returns; it replaces the current process.
    """
    utils.LOGGER.debug("Exec handoff to script: %s", script_path)
    # Falsy env (None or empty) falls back to the current process environment.
    exec_env = env or os.environ
    os.execvpe("/bin/sh", ["/bin/sh", str(script_path)], exec_env)
|
|
File without changes
|