common-python-tasks 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,16 @@
1
+ from typing import TYPE_CHECKING
2
+
3
+ if TYPE_CHECKING:
4
+ from collections.abc import Sequence
5
+
6
+ from poethepoet_tasks import TaskCollection
7
+
8
+ __version__ = "0.0.0"
9
+
10
+ __all__ = ["TaskCollection"]
11
+
12
+
def tasks(include_tags: "Sequence[str]" = tuple(), exclude_tags: "Sequence[str]" = tuple()):
    """Return the project's task collection, filtered by tag.

    The annotations are quoted because ``Sequence`` is only imported under
    ``TYPE_CHECKING``; unquoted annotations are evaluated when the ``def``
    executes and would raise NameError on import (Python < 3.14).

    Args:
        include_tags: Only include tasks carrying one of these tags.
        exclude_tags: Exclude tasks carrying any of these tags.
    """
    from .tasks import tasks

    return tasks(include_tags=include_tags, exclude_tags=exclude_tags)
@@ -0,0 +1,8 @@
1
+ [run]
2
+ omit =
3
+ tests/*
4
+ */vendor/*
5
+ */__main__.py
6
+ [paths]
7
+ source =
8
+ src/*
@@ -0,0 +1,10 @@
1
+ *
2
+ !dist/*.whl
3
+ !pyproject.toml
4
+ !poetry.lock
5
+ !README.md
6
+ !LICENSE
7
+
8
+ !src
9
+
10
+ !.git
@@ -0,0 +1,9 @@
1
+ [flake8]
2
+ extend-ignore = E501,E203,W503
3
+ max-line-length = 88
4
+ select = B,C,E,F,W,T4,B9
5
+ exclude =
6
+ .poetry
7
+ .venv
8
+ .dist
9
+ build
@@ -0,0 +1,4 @@
1
+ [settings]
2
+ profile=black
3
+ multi_line_output=3
4
+ extend_skip_glob=.poetry/*
@@ -0,0 +1,76 @@
1
+ # syntax=docker/dockerfile:1
2
+
3
+ ARG PYTHON_VERSION=3
4
+
5
+ FROM python:${PYTHON_VERSION} AS builder
6
+
7
+ ENV PYTHON_VERSION=${PYTHON_VERSION}
8
+
9
+ ENV PYTHONUNBUFFERED=1
10
+ ENV PYTHONDONTWRITEBYTECODE=1
11
+
12
+ ARG POETRY_VERSION
13
+
14
+ # Install Poetry and required Poetry plugins
15
+ RUN --mount=type=cache,target=/root/.cache/pip pip install "poetry==$POETRY_VERSION" "poetry-dynamic-versioning[plugin]" poetry-plugin-export tomlkit
16
+
17
+ # Build package
18
+ WORKDIR /tmp/build
19
+ COPY . /tmp/build/
20
+
21
+ # Try to export debug requirements but don't fail the build if the group doesn't exist
22
+ RUN --mount=type=cache,target=/root/.cache/pip \
23
+ if python -c "import tomlkit; t = tomlkit.load(open('pyproject.toml')); exit(0 if ('dependency-groups' in t and 'debug' in t['dependency-groups'] and t['dependency-groups']['debug']) else 1)"; then \
24
+ poetry export --only debug --without-hashes -f requirements.txt --output requirements-debug.txt; \
25
+ fi
26
+
27
+ RUN --mount=type=cache,target=/root/.cache/pypoetry poetry build --format=wheel
28
+
29
+ FROM python:${PYTHON_VERSION}-slim AS runtime
30
+
31
+ WORKDIR /
32
+
33
+ # Grab package from builder image
34
+ COPY --from=builder /tmp/build/dist/*.whl /tmp/
35
+
36
+ # Install package
37
+ RUN --mount=type=cache,target=/root/.cache/pip pip install /tmp/*.whl
38
+ RUN rm -rf /tmp/*.whl \
39
+ && rm -rf /root/.cache/pip
40
+ # Create symlinks for the package
41
+ ARG PACKAGE_NAME
42
+ ENV PACKAGE_NAME=${PACKAGE_NAME}
43
+ RUN ln -s "$(python -c "import os; from importlib import resources; print(resources.files(os.environ['PACKAGE_NAME']))")" "/_$PACKAGE_NAME" \
44
+ && ln -s "/_$PACKAGE_NAME" "/pkg" \
45
+ && rm -rf "/_$PACKAGE_NAME/__pycache__"
46
+
47
+ ENTRYPOINT ["/pkg/entrypoint.sh"]
48
+
49
+ ARG AUTHORS
50
+ ARG GIT_COMMIT
51
+ LABEL org.opencontainers.image.authors=${AUTHORS}
52
+ LABEL git.commit=${GIT_COMMIT}
53
+
54
+ # Set custom entrypoint if provided
55
+ # This entrypoint is deliberately not configurable via environment variables in order to
56
+ # ensure that the container always uses the entrypoint selected at build time. If the
57
+ # current package does not provide a console script, the entrypoint will default to python
58
+ ARG CUSTOM_ENTRYPOINT
59
+ RUN if [ -z "${CUSTOM_ENTRYPOINT}" ]; then cliScriptName=$(python -c "import os; from importlib import metadata as m; print(next((e.name for e in m.entry_points().select(group='console_scripts') if e.name==os.environ['PACKAGE_NAME'].replace('-','_')), ''))"); else cliScriptName=$CUSTOM_ENTRYPOINT; fi \
60
+ && echo "#!/bin/sh\n\n${cliScriptName:-python} \"\$@\"" >/pkg/entrypoint.sh \
61
+ && chmod +x /pkg/entrypoint.sh
62
+
63
+ # Optional debug stage: only installs debug deps if they were exported. This stage will not
64
+ # be built by default (the final stage below is the runtime image), and it will safely do
65
+ # nothing if there are no debug requirements
66
+ FROM runtime AS debug
67
+
68
+ COPY --from=builder /tmp/build /tmp/build
69
+
70
+ RUN --mount=type=cache,target=/root/.cache/pip if [ -f /tmp/build/requirements-debug.txt ] && [ -s /tmp/build/requirements-debug.txt ]; then pip install -r /tmp/build/requirements-debug.txt; fi
71
+ RUN rm -rf /tmp/build /root/.cache/pip
72
+
73
+ # Final (default) image: explicitly use runtime as the final target so debug is not used unless requested
74
+ FROM runtime AS final
75
+
76
+ RUN rm -rf /tmp/build /root/.cache/pip
@@ -0,0 +1,4 @@
1
+ [pytest]
2
+ filterwarnings =
3
+ error::DeprecationWarning
4
+ ignore:unclosed database in <sqlite3.Connection object at:ResourceWarning
@@ -0,0 +1,870 @@
1
+ import logging
2
+ import os
3
+ from collections.abc import Callable, Sequence
4
+ from functools import lru_cache
5
+ from pathlib import Path
6
+ from typing import TYPE_CHECKING, Any
7
+
8
+ if TYPE_CHECKING:
9
+ import subprocess
10
+ from typing import Literal
11
+
12
+ from poethepoet_tasks import TaskCollection
13
+
14
+
15
+ class _ColoredFormatter(logging.Formatter):
16
+ """Custom formatter with color codes for different log levels."""
17
+
18
+ COLORS = {
19
+ "WARNING": "\033[93m", # Yellow
20
+ "ERROR": "\033[91m", # Red
21
+ "CRITICAL": "\033[91m", # Red
22
+ "RESET": "\033[0m",
23
+ }
24
+
25
+ def format(self, record: logging.LogRecord) -> str:
26
+ log_color = self.COLORS.get(record.levelname, "")
27
+ record.levelname = f"\033[1m{log_color}{record.levelname}{self.COLORS['RESET'] if log_color else ''}\033[0m"
28
+ return super().format(record)
29
+
30
+
# Package-wide logger; verbosity is driven by COMMON_PYTHON_TASKS_LOG_LEVEL
# (case-insensitive, defaults to INFO).
LOGGER = logging.getLogger("common_python_tasks")
handler = logging.StreamHandler()
handler.setFormatter(_ColoredFormatter("[%(asctime)s] %(levelname)s: %(message)s"))
LOGGER.addHandler(handler)
_level_names = {
    "DEBUG": logging.DEBUG,
    "INFO": logging.INFO,
    "WARNING": logging.WARNING,
    "ERROR": logging.ERROR,
    "CRITICAL": logging.CRITICAL,
}
LOGGER.setLevel(_level_names[os.getenv("COMMON_PYTHON_TASKS_LOG_LEVEL", "INFO").upper()])
44
+
45
+
46
+ def _env_truthy(env_var: str) -> bool:
47
+ return os.getenv(env_var, "").lower() in {
48
+ "1",
49
+ "true",
50
+ "yes",
51
+ "on",
52
+ "enabled",
53
+ "y",
54
+ "t",
55
+ }
56
+
57
+
58
+ @lru_cache
59
+ def _is_package_installed(package_name: str) -> bool:
60
+ """Check if a Python package is installed in the current environment."""
61
+ from importlib.util import find_spec
62
+
63
+ # Handle packages where import name differs from package name
64
+ import_name = package_name.replace("-", "_")
65
+ is_installed = find_spec(import_name) is not None
66
+ if not is_installed:
67
+ LOGGER.debug("%s is not installed, skipping", package_name)
68
+ return is_installed
69
+
70
+
def _fatal(message: str, exit_code: int = 1) -> None:
    """Log *message* as an error and terminate the process with *exit_code*.

    Never returns normally: delegates to sys.exit, which raises SystemExit.
    """
    import sys

    LOGGER.error(message)
    sys.exit(exit_code)
76
+
77
+
def _require_package(package_name: str) -> None:
    """Exit with an error message unless *package_name* is importable."""
    if not _is_package_installed(package_name):
        _fatal(f"{package_name} is not installed")
81
+
82
+
def _run_available_tools(
    tools: list[tuple[Callable, str]], none_available_message: str
) -> None:
    """Run each (callable, package) pair whose package is installed.

    Exits via *none_available_message* when no tool at all could be run.
    """
    ran_any = False
    for tool_fn, package_name in tools:
        if not _is_package_installed(package_name):
            continue
        tool_fn()
        ran_any = True
    if not ran_any:
        _fatal(none_available_message)
93
+
94
+
95
+ def _get_authors() -> list[tuple[str, str]]:
96
+ import tomllib
97
+
98
+ pyproject_data = tomllib.loads(Path("pyproject.toml").read_text(encoding="utf-8"))
99
+
100
+ def _parse_author(author: dict[str, str]) -> tuple[str, str]:
101
+ return (author.get("name") or "").strip(), (
102
+ author.get("email") or ""
103
+ ).strip().strip("<>")
104
+
105
+ return [
106
+ _parse_author(author)
107
+ for author in (pyproject_data.get("project", {}).get("authors", []))
108
+ ]
109
+
110
+
def _run_command(
    command: Sequence[str],
    *,
    capture_output: bool = False,
    acceptable_returncodes: Sequence[int] | None = None,
) -> "subprocess.CompletedProcess":
    """Run *command* (no shell), exiting the process on an unacceptable code.

    Args:
        command: Argument vector to execute.
        capture_output: Capture stdout/stderr; on failure they are logged.
        acceptable_returncodes: Codes treated as success; defaults to {0}.

    Returns:
        The completed process, for callers that inspect stdout/returncode.
    """
    import subprocess
    from shlex import quote

    if acceptable_returncodes is None:
        acceptable_returncodes = {0}

    display = " ".join(quote(part) for part in command)
    LOGGER.debug("Running command: %s", display)
    result = subprocess.run(command, capture_output=capture_output, text=True)

    if result.returncode in acceptable_returncodes:
        return result

    details = ""
    if capture_output:
        for label, stream in (("stdout", result.stdout), ("stderr", result.stderr)):
            text = stream.strip() if stream else ""
            if text:
                details += f"\n{label}: {text}"
    LOGGER.error(
        "Command failed (exit code %d): %s%s",
        result.returncode,
        display,
        details,
    )

    import sys

    sys.exit(result.returncode)
152
+
153
+
def _load_data_file(file_name: str) -> tuple[str, str]:
    """Return (path, contents) of a bundled data file, exiting when missing."""
    from importlib.resources import files

    try:
        resource = files("common_python_tasks") / "data" / file_name
        return str(resource), resource.read_text()
    except FileNotFoundError as e:
        _fatal(f"Data file not found: {file_name} ({e})")
163
+
164
+
def _get_dirty_files(ignore: list[str] | None = None) -> list[str]:
    """List files reported by `git status --porcelain`, minus *ignore* entries."""
    ignored = ignore if ignore is not None else []
    status_lines = _run_command(
        ["git", "status", "--porcelain"], capture_output=True
    ).stdout.splitlines()
    # Porcelain lines look like "XY path"; drop the two status chars + space.
    dirty = [line[3:] for line in status_lines if line]
    return [path for path in dirty if path not in ignored]
180
+
181
+
def _get_version(files_to_ignore_as_dirty: list[str] | None = None) -> str:
    """Serialize the git-derived version (PEP 440), marking dirty checkouts.

    Args:
        files_to_ignore_as_dirty: Paths that should not count as dirtiness.
    """
    from dunamai import Style, Version

    ignored = files_to_ignore_as_dirty if files_to_ignore_as_dirty is not None else []
    dirty_files = _get_dirty_files(ignore=ignored)
    LOGGER.debug("Dirty files: %s", dirty_files)
    return Version.from_git().serialize(style=Style.Pep440, dirty=bool(dirty_files))
195
+
196
+
def _get_image_tag(files_to_ignore_as_dirty: list[str] | None = None) -> str:
    """Derive a Docker-safe tag from the PEP 440 version string."""
    ignored = files_to_ignore_as_dirty if files_to_ignore_as_dirty is not None else []
    tag = _get_version(files_to_ignore_as_dirty=ignored)
    # Docker tags cannot contain '+'; '.post'/'.dev' separators are rewritten.
    for old, new in ((".post", "-post"), (".dev", "-dev"), ("+", "-")):
        tag = tag.replace(old, new)
    return tag
207
+
208
+
def _has_tags_later_in_history() -> bool:
    """Return True when some git tag's commit is not reachable from HEAD.

    NOTE(review): `git merge-base --is-ancestor HEAD <tag>` exits 0 when HEAD
    *is* an ancestor of the tag and 1 when it is not. This function returns
    True on exit code 1 — i.e. when a tag exists on a different branch or
    ahead of HEAD — which matches the inline comment below but reads as the
    inverse of the header comment above the loop; confirm intended semantics
    against the 'latest'-tag callers.
    """
    result = _run_command(
        ["git", "tag"],
        capture_output=True,
        acceptable_returncodes={0, 128},
    )
    if result.returncode != 0 or not result.stdout.strip():
        # No tags exist
        return False

    # Check each tag to see if it's reachable from HEAD
    for tag in result.stdout.strip().split("\n"):
        # Check if HEAD is an ancestor of the tag's commit
        # If git merge-base --is-ancestor HEAD <tag> returns 0, then HEAD is an ancestor
        # If it returns 1, then HEAD is NOT an ancestor (tag is in a different branch/future)
        check_result = _run_command(
            ["git", "merge-base", "--is-ancestor", "HEAD", tag],
            capture_output=True,
            acceptable_returncodes={0, 1},
        )
        if check_result.returncode == 1:
            # HEAD is not an ancestor of this tag, meaning the tag is later in history
            return True

    return False
234
+
235
+
236
+ def _get_dockerhub_username() -> str:
237
+
238
+ from getpass import getuser
239
+
240
+ return os.getenv("DOCKERHUB_USERNAME") or getuser()
241
+
242
+
243
+ def _get_package_name(use_underscores: bool = False) -> str:
244
+ import tomllib
245
+
246
+ name = os.getenv("PACKAGE_NAME") or tomllib.loads(
247
+ Path("pyproject.toml").read_text()
248
+ ).get("project", {}).get("name")
249
+ if use_underscores and name:
250
+ name = name.replace("-", "_")
251
+ return name
252
+
253
+
254
+ @lru_cache
255
+ def _read_pyproject_toml() -> dict[str, Any]:
256
+ import tomllib
257
+
258
+ return tomllib.loads(Path("pyproject.toml").read_text())
259
+
260
+
def get_config_path(
    env_var_name: str,
    local_config_filename: str,
    data_config_filename: str,
    *,
    tool_name: str | None = None,
) -> Path | None:
    """Resolve which configuration file a tool should use.

    Resolution order:
    1. If *tool_name* is given and [tool.{tool_name}] is present and non-empty
       in pyproject.toml, return None (the tool reads pyproject.toml itself).
    2. The path named by the *env_var_name* environment variable.
    3. A local file named *local_config_filename*.
    4. The bundled data file *data_config_filename*.

    Args:
        env_var_name: Environment variable that may point at a config file.
        local_config_filename: Candidate config file in the working directory.
        data_config_filename: Bundled fallback shipped with this package.
        tool_name: Table to look for under [tool] in pyproject.toml.

    Returns:
        Path to a config file, or None when pyproject.toml already configures
        the tool.
    """
    if tool_name is not None and _read_pyproject_toml().get("tool", {}).get(tool_name):
        LOGGER.debug("Using [tool.%s] configuration from pyproject.toml", tool_name)
        return None

    env_value = os.getenv(env_var_name)
    if env_value:
        env_path = Path(env_value)
        LOGGER.debug("Using config from %s: %s", env_var_name, env_path)
        return env_path

    local_path = Path(local_config_filename)
    if local_path.exists():
        LOGGER.debug("Using local config file: %s", local_path)
        return local_path

    bundled_path = Path(_load_data_file(data_config_filename)[0])
    LOGGER.debug("Using bundled config file: %s", bundled_path)
    return bundled_path
309
+
310
+
# Runtime import: the earlier `from poethepoet_tasks import TaskCollection`
# sits under `if TYPE_CHECKING:` and is not available when this module
# actually executes, which made the line below raise NameError on import.
from poethepoet_tasks import TaskCollection

tasks = TaskCollection(
    # Only reference env files that actually exist in the project root.
    envfile=[
        candidate
        for candidate in [
            "project.properties",
            ".env",
        ]
        if Path(candidate).exists()
    ]
)
321
+
322
+
@tasks.script(task_name="_black", tags=["format", "internal"])
def black() -> None:
    """Format the whole project with black (quiet mode)."""
    _require_package("black")
    _run_command(["black", "--quiet", "."])
328
+
329
+
@tasks.script(task_name="_isort", tags=["format", "internal"])
def isort() -> None:
    """Run isort formatting.

    Bug fix: previously `--settings-path` was always appended even when no
    config path was resolved, leaving a dangling flag with no value.
    """
    _require_package("isort")
    isort_config_path = get_config_path(
        "ISORT_CONFIG",
        ".isort.cfg",
        ".isort.cfg",
        tool_name="isort",
    )

    command = ["isort", "--quiet", "."]
    if isort_config_path:
        command += ["--settings-path", str(isort_config_path)]
    _run_command(command)
350
+
351
+
@tasks.script(task_name="_autoflake", tags=["format", "internal"])
def autoflake() -> None:
    """Strip unused imports across the project with autoflake (in place)."""
    _require_package("autoflake")
    command = [
        "autoflake",
        "--quiet",
        "--remove-all-unused-imports",
        "--recursive",
        "-i",
        ".",
    ]
    _run_command(command)
366
+
367
+
@tasks.script(task_name="_black_check", tags=["lint", "internal"])
def black_check() -> None:
    """Verify black formatting without modifying any files."""
    _require_package("black")
    _run_command(["black", "--quiet", "--diff", ".", "--check"])
373
+
374
+
@tasks.script(task_name="_isort_check", tags=["lint"])
def isort_check() -> None:
    """Run isort in check-only mode.

    Bug fix: previously `--settings-path` was always appended even when no
    config path was resolved, leaving a dangling flag with no value.
    """
    _require_package("isort")
    isort_config_path = get_config_path(
        "ISORT_CONFIG",
        ".isort.cfg",
        ".isort.cfg",
        tool_name="isort",
    )

    command = ["isort", "--quiet", ".", "--check-only"]
    if isort_config_path:
        command += ["--settings-path", str(isort_config_path)]
    _run_command(command)
396
+
397
+
@tasks.script(task_name="_autoflake_check", tags=["lint", "internal"])
def autoflake_check() -> None:
    """Report unused imports with autoflake (-cd: check mode with diff)."""
    _require_package("autoflake")
    command = [
        "autoflake",
        "--quiet",
        "--remove-all-unused-imports",
        "--recursive",
        "-cd",
        ".",
    ]
    _run_command(command)
412
+
413
+
@tasks.script(task_name="_flake8_check", tags=["lint"])
def flake8_check() -> None:
    """Lint the project with flake8 using the resolved config file."""
    _require_package("flake8")
    # No tool_name is passed, so this always resolves to a concrete path
    # (env var, local .flake8, or the bundled fallback).
    config = get_config_path(
        "FLAKE8_CONFIG",
        ".flake8",
        ".flake8",
    )
    _run_command(["flake8", ".", "--config", str(config)])
426
+
427
+
@tasks.script(tags=["test"])
def test() -> None:
    """Run the test suite with coverage (if pytest-cov is installed).

    Exits with pytest's return code on failure. Exit code 5 ("no tests were
    collected") is downgraded to a warning but still propagated so CI can
    decide how to treat it; a successful run returns normally.
    """
    # Consistent with the other tool tasks: fail fast with a clear message
    # instead of a FileNotFoundError from subprocess.
    _require_package("pytest")

    coverage_config_path = get_config_path(
        "COVERAGE_RCFILE",
        ".coveragerc",
        ".coveragerc",
        tool_name="coverage",
    )

    pytest_config_path = get_config_path(
        "PYTEST_CONFIG",
        "pytest.ini",
        "pytest.ini",
        tool_name="pytest",
    )

    coverage_args: list[str] = []
    if _is_package_installed("pytest_cov"):
        coverage_args = [
            "--cov=" + _get_package_name(use_underscores=True),
            "--cov-report=term-missing",
            "--cov-report=xml:coverage.xml",
        ]
        if coverage_config_path:
            coverage_args.append("--cov-config=" + str(coverage_config_path))

    command = ["pytest", "-vv"]
    if pytest_config_path:
        command += ["-c", str(pytest_config_path)]
    command += coverage_args

    exit_code = _run_command(command, acceptable_returncodes={0, 5}).returncode

    if exit_code == 5:
        # Exit with 5 only in this branch; a clean run must return normally.
        LOGGER.warning("No tests were collected.")

        import sys

        sys.exit(5)
485
+
486
+
@tasks.script(task_name="clean", tags=["clean"])
def clean() -> None:
    """Remove caches, build artifacts, bytecode, and coverage outputs."""
    import shutil

    targets = [
        Path(".pytest_cache"),
        Path("dist"),
        Path(".mypy_cache"),
        *Path(".").rglob("__pycache__"),
        *Path(".").rglob("*.pyc"),
        Path(".coverage"),
        Path("coverage.xml"),
    ]
    for target in targets:
        if target.is_dir():
            shutil.rmtree(target, ignore_errors=True)
        else:
            target.unlink(missing_ok=True)
503
+
504
+
@tasks.script(task_name="format", tags=["format"])
def format_all() -> None:
    """Format Python code with every installed formatter (autoflake, black, isort)."""
    formatters = [
        (autoflake, "autoflake"),
        (black, "black"),
        (isort, "isort"),
    ]
    _run_available_tools(
        formatters,
        "No formatting tools are installed. Install one or more of: autoflake, black, isort",
    )
516
+
517
+
@tasks.script(task_name="lint", tags=["lint"])
def lint_all() -> None:
    """Lint Python code with every installed linter (autoflake, black, isort, flake8)."""
    linters = [
        (autoflake_check, "autoflake"),
        (black_check, "black"),
        (isort_check, "isort"),
        (flake8_check, "flake8"),
    ]
    _run_available_tools(
        linters,
        "No linting tools are installed. Install one or more of: autoflake, black, isort, flake8",
    )
530
+
531
+
def _build_image(
    containerfile_path: Path | None = None,
    containerfile_text: str | None = None,
    context_path: Path | None = None,
    debug: bool = False,
    no_cache: bool = False,
    plain: bool = False,
    single_arch: bool = False,
) -> None:
    """Build the project's container image with `docker build`.

    Args:
        containerfile_path: Existing Containerfile to build from; when None,
            *containerfile_text* is written to a temporary file instead.
        containerfile_text: Containerfile contents used when no path is given.
        context_path: Build context directory (defaults to the CWD).
        debug: Build the `debug` target and tag the image accordingly.
        no_cache: Pass --no-cache to docker build.
        plain: Use plain (non-pretty) build progress output.
        single_arch: Skip the multi-platform (amd64+arm64) build.
    """
    import platform

    if context_path is None:
        context_path = Path(".")

    # When only text is provided, materialize it as a temporary Containerfile.
    temp_file_path: str | None = None
    if containerfile_path is None:
        if containerfile_text is None:
            _fatal("Either containerfile_path or containerfile_text must be provided.")
        import tempfile

        tf = tempfile.NamedTemporaryFile(
            mode="w",
            encoding="utf-8",
            delete=False,
            prefix="Containerfile.",
            suffix=".generated",
        )
        temp_file_path = tf.name
        with open(temp_file_path, "w", encoding="utf-8") as f:
            f.write(containerfile_text)
        containerfile_path = Path(temp_file_path)

    # Handle .dockerignore file: fall back to the bundled one when the project
    # does not ship its own; it is removed again in the finally block below.
    dockerignore_path = context_path / ".dockerignore"
    temp_dockerignore_created = False
    if not dockerignore_path.exists():
        LOGGER.debug("No .dockerignore found, using built-in .dockerignore")
        builtin_dockerignore_content = _load_data_file(".dockerignore")[1]
        dockerignore_path.write_text(builtin_dockerignore_content, encoding="utf-8")
        temp_dockerignore_created = True

    # Only set after a successful build, so the generated Containerfile is
    # kept on failure (presumably to aid debugging — confirm intent).
    delete_temp_file = False
    try:
        archs = ["linux/amd64", "linux/arm64"] if not single_arch else None
        # A generated .dockerignore must not make the tree look dirty.
        files_to_ignore = [".dockerignore"] if temp_dockerignore_created else []
        version_string = _get_image_tag(files_to_ignore_as_dirty=files_to_ignore)

        if debug:
            suffix = "-debug"
            target = "debug"
            tag = "debug"
        else:
            suffix = ""
            target = "runtime"
            # Only tag as 'latest' if there are no tags later in history
            tag = "latest" if not _has_tags_later_in_history() else None

        version_tag = f"{version_string}{suffix}"
        # Short commit hash, plus a '-dirty' marker and the debug suffix.
        commit_tag = f"{_run_command(['git', 'rev-parse', '--short', 'HEAD'], capture_output=True).stdout.strip()}{'-dirty' if _get_dirty_files(ignore=files_to_ignore) else ''}{suffix}"
        python_version = platform.python_version()
        # `poetry --version` ends with "(version X.Y.Z)": take the last token
        # and drop the trailing ')'.
        poetry_version = (
            _run_command(["poetry", "--version"], capture_output=True)
            .stdout.strip()
            .split()[-1]
        )[0:-1]

        # Build args with a None value are omitted entirely.
        build_args = {
            k: v
            for k, v in {
                "PYTHON_VERSION": python_version,
                "POETRY_VERSION": poetry_version,
                "PACKAGE_NAME": _get_package_name(use_underscores=True),
                "AUTHORS": ",".join(
                    [f"{name} <{email}>" for name, email in _get_authors()]
                ),
                "GIT_COMMIT": commit_tag,
                "CUSTOM_ENTRYPOINT": os.getenv("CUSTOM_IMAGE_ENTRYPOINT"),
            }.items()
            if v is not None
        }
        tags_to_use = [t for t in (tag, version_tag, commit_tag) if t is not None]
        LOGGER.info("Building image with tags: %s", ", ".join(tags_to_use))
        build_cmd = (
            [
                "docker",
                "build",
                str(context_path),
                "-f",
                str(containerfile_path),
                "--target",
                target,
            ]
            + sum(
                [
                    ["--build-arg", f"{k}={v if v is not None else ''}"]
                    for k, v in build_args.items()
                ],
                [],
            )
            + (["--platform", ",".join(archs)] if archs else [])
            + (["--no-cache"] if no_cache else [])
            + sum(
                [["-t", f"{_get_package_name()}:{t}"] for t in tags_to_use],
                [],
            )
        )
        # Also tag every image for the target registry (Docker Hub by default).
        registry = os.environ.get(
            "CONTAINER_REGISTRY_URL",
            f"docker.io/{_get_dockerhub_username()}",
        ).strip()
        full_name = f"{registry}/{_get_package_name()}"
        for t in tags_to_use:
            build_cmd += ["-t", f"{full_name}:{t}"]

        if plain:
            build_cmd += ["--progress", "plain"]
        _run_command(build_cmd)
        delete_temp_file = True
    finally:
        if temp_file_path is not None and delete_temp_file:
            try:
                containerfile_path.unlink()
            except FileNotFoundError:
                pass
        if temp_dockerignore_created:
            try:
                dockerignore_path.unlink()
            except FileNotFoundError:
                pass
661
+
662
+
@tasks.script(tags=["containers", "build"])
def build_image(
    debug: bool = False,
    no_cache: bool = False,
    plain: bool = False,
    single_arch: bool = False,
) -> None:
    """Build the container image for this project using the Containerfile template.

    Args:
        debug: Build the debug image.
        no_cache: Do not use cache when building the image.
        plain: Do not pretty-print output.
        single_arch: Build images for a single architecture.
    """
    template_text = _load_data_file("Containerfile")[1]
    _build_image(
        None,
        template_text,
        Path("."),
        debug=debug,
        no_cache=no_cache,
        plain=plain,
        single_arch=single_arch,
    )
687
+
688
+
@tasks.script(tags=["containers"])
def run_container(tag: str = "latest") -> None:
    """Run the Docker image as an interactive container for this project.

    Args:
        tag: Image tag to run. Defaults to "latest".
    """
    image = f"{_get_package_name()}:{tag}"
    _run_command(["docker", "run", "--rm", "-i", "-t", image])
697
+
698
+
@tasks.script(tags=["containers", "packaging", "release"])
def push_image(debug: bool = False) -> None:
    """Push the Docker image for this project to the container registry.

    Args:
        debug: Push the debug image.
    """
    if debug:
        suffix = "-debug"
        tag = "debug"
    else:
        suffix = ""
        # Only push 'latest' tag if there are no tags later in history
        tag = "latest" if not _has_tags_later_in_history() else None

    registry = os.environ.get(
        "CONTAINER_REGISTRY_URL",
        f"docker.io/{_get_dockerhub_username()}",
    ).strip()
    full_name = f"{registry}/{_get_package_name()}"
    for candidate in (tag, f"{_get_image_tag()}{suffix}"):
        if candidate is None:
            continue
        full_tag = f"{full_name}:{candidate}"
        LOGGER.info("Pushing image %s", full_tag)
        _run_command(["docker", "push", full_tag])
724
+
725
+
@tasks.script(task_name="publish-package", tags=["packaging"])
def publish_package() -> None:
    """Publish the package to the PyPI server via Poetry."""
    _run_command(["poetry", "publish"])
730
+
731
+
@tasks.script(task_name="build-package", tags=["packaging", "build"])
def build_package() -> None:
    """Build the package distributions (wheel and sdist) with Poetry."""
    _run_command(["poetry", "build"])
736
+
737
+
@tasks.script(tags=["packaging"])
def bump_version(
    component: str = "patch",
    *,
    stage: str | None = None,
) -> None:
    """Bump the project version by creating a new git tag.

    Args:
        component: The version component to bump: "major", "minor", or "patch".
        stage: Optional pre-release stage to apply: "alpha", "beta", or "rc".
    """
    from dunamai import Style, Version

    component = component.lower()
    stage = stage.lower() if stage is not None else None

    # Refuse to tag a dirty tree: the tag must describe committed state only.
    if _run_command(
        ["git", "status", "--porcelain"], capture_output=True
    ).stdout.strip():
        _fatal(
            "Repository has uncommitted changes. Please commit or stash changes before bumping version."
        )

    # Try to get the latest tag; default to v0.0.0 if none exist
    last_tag_result = _run_command(
        ["git", "describe", "--tags", "--abbrev=0"],
        capture_output=True,
        acceptable_returncodes={0, 128},
    )
    last_tag = (
        last_tag_result.stdout.strip() if last_tag_result.returncode == 0 else "v0.0.0"
    )

    # Check if current version equals the last tag, refuse to bump if so
    current_version = _get_version()
    # Normalize last tag by stripping leading 'v' if present
    normalized_last_tag = last_tag[1:] if last_tag.startswith("v") else last_tag
    if current_version == normalized_last_tag and last_tag != "v0.0.0":
        _fatal(
            "There have been no changes since the last version tag; cannot bump version as it would not change."
        )

    possible_components = ("major", "minor", "patch")
    if component not in possible_components:
        # Join outside the f-string: a backslash inside an f-string expression
        # is a SyntaxError on Python < 3.12 (this package supports 3.10+), and
        # a comma-separated list reads better in a one-line error message.
        options = ", ".join(possible_components)
        _fatal(f'Invalid component "{component}". Must be one of: {options}')
    component_index: "Literal[0, 1, 2]" = possible_components.index(component)

    prerelease_options = {
        "a": "alpha",
        "alpha": "alpha",
        "b": "beta",
        "beta": "beta",
        "rc": "rc",
    }
    normalized_stage = None
    if stage is not None:
        if stage not in prerelease_options:
            _fatal(f'Invalid stage "{stage}". Must be one of: alpha, beta, rc')
        normalized_stage = prerelease_options[stage]

    # Bump version using dunamai
    if last_tag == "v0.0.0":
        # No real tags yet; bump from 0.0.0
        base_version = Version.parse("0.0.0")
        new_version = base_version.bump(component_index)
    else:
        new_version = Version.from_git().bump(component_index)

    if normalized_stage is not None:
        if new_version.stage != normalized_stage:
            new_version.stage = normalized_stage
            new_version.revision = 1
        elif new_version.revision is None:
            new_version.revision = 1

    # Serialize without dirty flag for clean release tags
    serialized = new_version.serialize(style=Style.Pep440)
    LOGGER.info("Bumping version to %s", serialized)
    _run_command(["git", "tag", f"v{serialized}"])
821
+
822
+
def _build(
    has_containers: bool,
    debug: bool = False,
    no_cache: bool = False,
    plain: bool = False,
    single_arch: bool = False,
) -> None:
    """Build the package, then optionally the container image.

    Args:
        has_containers: Also build the container image after the package.
        debug, no_cache, plain, single_arch: Forwarded to build_image.
    """
    build_package()
    if not has_containers:
        return
    build_image(
        debug=debug,
        no_cache=no_cache,
        plain=plain,
        single_arch=single_arch,
    )
838
+
839
+
@tasks.script(
    task_name="build",
    tags=["packaging", "containers"],
)
def build_with_containers(
    debug: bool = False,
    no_cache: bool = False,
    plain: bool = False,
    single_arch: bool = False,
) -> None:
    """Build the project and its containers.

    Args:
        debug: Build the debug image.
        no_cache: Do not use cache when building the image.
        plain: Do not pretty-print output.
        single_arch: Build images for a single architecture.
    """
    options = {
        "debug": debug,
        "no_cache": no_cache,
        "plain": plain,
        "single_arch": single_arch,
    }
    _build(True, **options)
865
+
866
+
@tasks.script(task_name="build", tags=["packaging"])
def build_without_containers() -> None:
    """Build only the Python package (no container image)."""
    _build(False)
@@ -0,0 +1,295 @@
1
+ Metadata-Version: 2.4
2
+ Name: common-python-tasks
3
+ Version: 0.0.1
4
+ Summary: Opinionated Poe the Poet tasks for Python package development.
5
+ License-Expression: MIT
6
+ License-File: LICENSE
7
+ Author: Joseph Asbury
8
+ Author-email: ci_sourcerer@yahoo.com
9
+ Requires-Python: >=3.10,<4.0
10
+ Classifier: Development Status :: 3 - Alpha
11
+ Classifier: Intended Audience :: Developers
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.10
14
+ Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Programming Language :: Python :: 3.13
17
+ Classifier: Programming Language :: Python :: 3.14
18
+ Classifier: Topic :: Software Development :: Build Tools
19
+ Requires-Dist: autoflake (>=2.3.1,<3.0.0)
20
+ Requires-Dist: black (>=25.11.0,<26.0.0)
21
+ Requires-Dist: dunamai (>=1.25.0,<2.0.0)
22
+ Requires-Dist: flake8 (>=7.3.0,<8.0.0)
23
+ Requires-Dist: isort (>=7.0.0,<8.0.0)
24
+ Requires-Dist: poethepoet-tasks (>=0.3.0,<0.4.0)
25
+ Requires-Dist: pytest (>=9.0.1,<10.0.0)
26
+ Requires-Dist: pytest-cov (>=7.0.0,<8.0.0)
27
+ Requires-Dist: tomlkit (>=0.13.3,<0.14.0)
28
+ Project-URL: Homepage, http://github.com/ci-sourcerer/common-python-tasks
29
+ Project-URL: issues, http://github.com/ci-sourcerer/common-python-tasks/issues
30
+ Project-URL: source, http://github.com/ci-sourcerer/common-python-tasks
31
+ Description-Content-Type: text/markdown
32
+
33
+ # Common Python tasks
34
+
35
+ This package is a collection of (very) opinionated [Poe the Poet Python tasks](https://poethepoet.natn.io/guides/packaged_tasks.html) for common Python development workflows.
36
+
37
+ ## Quick start
38
+
39
+ ### Automated setup
40
+
41
+ You can add `common-python-tasks` to a new project by using the handy automated installation script.
42
+
43
+ ```shell
44
+ curl -sSL https://api.github.com/repos/ci-sourcerer/common-python-tasks/contents/scripts/add-common-python-tasks.sh | TAGS_TO_INCLUDE="format lint test" sh
45
+ ```
46
+
47
+ This will complete the following steps.
48
+
49
+ 1. Add the latest version of `common-python-tasks` to your `pyproject.toml` dependencies
50
+ 2. Configure Poe the Poet to include only the tasks with the specified tags
51
+ 3. Install the package using Poetry
52
+
53
+ **Always review scripts before running them!** Even though I believe I write good software, it's best practice to verify any script you download from the Internet.
54
+
55
+ ### Manual setup
56
+
57
+ 1. Add `common-python-tasks` to your `pyproject.toml` and configure Poe the Poet to include the desired tasks
58
+
59
+ ```toml
60
+ [project]
61
+ name = "my-awesome-project"
62
+ version = "0.0.1"
63
+ dependencies = [
64
+ "common-python-tasks==0.0.1", # Always pin to a specific version
65
+ ]
66
+
67
+ [tool.poe]
68
+ include_script = "common_python_tasks:tasks(include_tags=['format', 'lint', 'test'])" # Include or exclude tasks by tags
69
+ ```
70
+
71
+ 2. Install the package
72
+
73
+ ```shell
74
+ poetry install
75
+ ```
76
+
77
+ 3. Run tasks
78
+
79
+ ```shell
80
+ poe format # Format your code
81
+ poe lint # Check code quality
82
+ poe test # Run tests with coverage
83
+ ```
84
+
85
+ ## Available tasks
86
+
87
+ Internal tasks are used by other tasks and are not meant to be run directly.
88
+
89
+ | Task | Description | Tags |
90
+ | - | - | - |
91
+ | `build` | Build the project; also builds container images when the `containers` tag is included | packaging, containers |
92
+ | `build-image` | Build a container image using the bundled Containerfile template | containers, build |
93
+ | `build-package` | Build the package (wheel and sdist) | packaging, build |
94
+ | `bump-version` | Bump project version and create a git tag | packaging |
95
+ | `clean` | Remove build, cache, and coverage artifacts | clean |
96
+ | `format` | Format code with autoflake, black, and isort | format |
97
+ | `lint` | Run autoflake, black, isort checks, and flake8 linting | lint |
98
+ | `publish-package` | Publish the package to PyPI via Poetry | packaging |
99
+ | `push-image` | Push container images to the configured registry | containers, packaging, release |
100
+ | `run-container` | Run the built container image with the selected tag | containers |
101
+ | `test` | Run tests with pytest and generate coverage reports | test |
102
+
103
+ ## How it works
104
+
105
+ ### Prerequisites
106
+
107
+ Your project must meet the following requirements.
108
+
109
+ - Use Poetry for dependency management
110
+ - Have a `pyproject.toml` file at the root
111
+ - Have a package name (automatically inferred from `project.name` in `pyproject.toml`, or set via `PACKAGE_NAME` environment variable)
112
+
113
+ ### Configuration precedence
114
+
115
+ Tasks that need configuration files (`pytest`, `coverage`, `flake8`, `isort`) follow this order of precedence.
116
+
117
+ 1. **`pyproject.toml` sections** - `[tool.pytest]`, `[tool.coverage]`, `[tool.isort]` take priority
118
+ 2. **Environment variables** - Override config paths (see [Environment Variables](#environment-variables))
119
+ 3. **Local config files** - `pytest.ini`, `.coveragerc`, `.flake8`, `.isort.cfg` in project root
120
+ 4. **Bundled defaults** - Sensible defaults included with this package, found in the [`src/common_python_tasks/data`](src/common_python_tasks/data) directory
121
+
122
+ You can start with zero configuration and customize as needed.
123
+
124
+ ### Environment variables
125
+
126
+ #### Configuration files
127
+
128
+ The following environment variables configure the paths to configuration files.
129
+
130
+ - `PYTEST_CONFIG` specifies the path to the pytest configuration file
131
+ - `COVERAGE_RCFILE` specifies the path to the coverage configuration file
132
+ - `FLAKE8_CONFIG` specifies the path to the flake8 configuration file
133
+ - `ISORT_CONFIG` specifies the path to the isort configuration file
134
+
135
+ #### Package/Container settings
136
+
137
+ The following environment variables configure package and container behavior.
138
+
139
+ - `PACKAGE_NAME` overrides the package name (default is from `pyproject.toml`)
140
+ - `POETRY_VERSION` overrides the Poetry version for container builds
141
+ - `DOCKERHUB_USERNAME` specifies the Docker Hub username for image tagging (default is current local user)
142
+ - `CONTAINER_REGISTRY_URL` specifies the registry URL (default is `docker.io/{username}`)
143
+ - `CUSTOM_IMAGE_ENTRYPOINT` specifies a custom entrypoint script name for containers
144
+
145
+ #### Debugging
146
+
147
+ The following environment variable enables debugging output.
148
+
149
+ - `COMMON_PYTHON_TASKS_LOG_LEVEL` should be set to `DEBUG` to see detailed configuration resolution
150
+
151
+ ### Usage examples
152
+
153
+ You can include or exclude tasks by tags in your `pyproject.toml`.
154
+
155
+ #### Minimal setup
156
+
157
+ ```toml
158
+ [project]
159
+ name = "simple-cli-tool"
160
+ version = "0.0.1"
161
+ dependencies = ["common-python-tasks==0.0.1"]
162
+
163
+ [tool.poe]
164
+ include_script = "common_python_tasks:tasks(include_tags=['format', 'lint'])"
165
+ ```
166
+
167
+ Available tasks: `format`, `lint`.
168
+
169
+ #### Container-based project
170
+
171
+ ```toml
172
+ [project]
173
+ name = "containerized-app"
174
+ version = "0.0.1"
175
+ dependencies = ["common-python-tasks==0.0.1"]
176
+
177
+ [tool.poe]
178
+ include_script = "common_python_tasks:tasks(include_tags=['format', 'lint', 'test', 'containers'])"
179
+
180
+ [tool.poe.env]
181
+ DOCKERHUB_USERNAME = "myusername"
182
+ PACKAGE_NAME = "containerized-app"
183
+ ```
184
+
185
+ Available tasks: All tasks including `build-image` and `push-image`.
186
+
187
+ #### Custom pytest configuration
188
+
189
+ ```toml
190
+ [project]
191
+ name = "custom-test-setup"
192
+ dependencies = ["common-python-tasks==0.0.1"]
193
+ dynamic = ["version"]
194
+
195
+ [tool.poe]
196
+ include_script = "common_python_tasks:tasks(include_tags=['test'])"
197
+
198
+ [tool.pytest.ini_options]
199
+ testpaths = ["tests", "integration"]
200
+ addopts = "-ra"
201
+ ```
202
+
203
+ The `test` task will automatically use your `[tool.pytest.ini_options]` configuration.
204
+
205
+ ## Release workflow
206
+
207
+ The `release` tag is used to identify tasks that are part of the release process. To perform a complete release, follow these steps.
208
+
209
+ ```shell
210
+ # 1. Ensure all changes are committed
211
+ git add .
212
+ git commit -m "Prepare for release" # You probably want a better commit message than this
213
+
214
+ # 2. Bump the version (creates a git tag)
215
+ poe bump-version patch # or 'minor', 'major'; for pre-releases: poe bump-version patch --stage alpha
216
+
217
+ # 3. Build the package
218
+ poetry build
219
+
220
+ # 4. Publish to PyPI
221
+ poe publish-package
222
+
223
+ # 5. (Optional) If using containers
224
+ poe build-image
225
+ poe push-image
226
+
227
+ # 6. Push tags to remote
228
+ git push --tags
229
+ ```
230
+
231
+ ## Troubleshooting
232
+
233
+ ### "No tests were collected"
234
+
235
+ The `test` task exits with code 5 if no tests are found. You can address this in one of the following ways.
236
+
237
+ - Add tests to your `tests/` directory
238
+ - Exclude the `test` tag and simply do not run `poe test` with this configuration `include_script = "common_python_tasks:tasks(exclude_tags=['test', 'internal'])"`
239
+
240
+ ### Tasks not showing up with `poe --help`
241
+
242
+ Check your `[tool.poe]` configuration in `pyproject.toml`. Make sure you're using `include_script`, not `includes`.
243
+
244
+ ```toml
245
+ # Correct
246
+ [tool.poe]
247
+ include_script = "common_python_tasks:tasks(exclude_tags=['internal'])"
248
+
249
+ # Incorrect
250
+ [tool.poe]
251
+ includes = "common_python_tasks:tasks"
252
+ ```
253
+
254
+ ### Version bump fails with "no changes since last tag"
255
+
256
+ This is expected behavior. The `bump-version` task requires commits between the last tag and HEAD. You can resolve this in one of the following ways.
257
+
258
+ - Make changes and commit them first
259
+ - If you need to re-tag the same commit, delete the old tag (for example, `git tag -d v0.0.1`). This is not recommended. Versions should be immutable, and if you need to fix something, you should create a new patch version instead
260
+
261
+ ### Config files not being used
262
+
263
+ Check the configuration precedence (see [How it works](#how-it-works)). Use debug logging to see which config is selected.
264
+
265
+ ```shell
266
+ COMMON_PYTHON_TASKS_LOG_LEVEL=DEBUG poe test
267
+ ```
268
+
269
+ ### Container build fails with "unable to find package"
270
+
271
+ Make sure your `pyproject.toml` contains the following.
272
+
273
+ - A correct package name in `[project]`
274
+ - A package location defined with this configuration `[tool.poetry] packages = [{ include = "your_package", from = "src" }]`
275
+
276
+ ## Design choices
277
+
278
+ ### Containerfile (see [src/common_python_tasks/data/Containerfile](src/common_python_tasks/data/Containerfile))
279
+
280
+ The standard Python Containerfile incorporates several intentional design choices.
281
+
282
+ - Multi-stage build: the build stage installs Poetry and builds a wheel while the runtime stage installs only the wheel to keep the final image slim and reproducible
283
+ - Cache-aware installs mean pip and Poetry cache mounts speed up iterative builds without bloating the final image
284
+ - Explicit inputs through build args (`PYTHON_VERSION`, `POETRY_VERSION`, `PACKAGE_NAME`, `AUTHORS`, `GIT_COMMIT`, `CUSTOM_ENTRYPOINT`) make image metadata and behavior predictable and auditable
285
+ - Optional debug stage exports and installs the `debug` dependency group only when present without failing otherwise and is not part of the default final image
286
+ - Stable package path creates symlinks to the installed package so entrypoints and consumers have a consistent `/pkg` and `/_$PACKAGE_NAME` path regardless of wheel layout, which ensures that the package can be reliably imported and executed from a known location, and allows for the less common use case of reading files directly from the package path
287
+ - Safe entrypoint selection means the default entrypoint resolves the console script matching the package name while `CUSTOM_ENTRYPOINT` allows overriding at build time while keeping runtime behavior predictable
288
+ - Minimal final image uses the slim Python base, cleans wheel artifacts and caches, and sets `runtime` as the explicit final target so the debug stage is opt-in
289
+
290
+ ## Notes
291
+
292
+ - This project dogfoods itself — it uses `common-python-tasks` for its own development
293
+ - Contributions welcome! Open an issue/discussion to discuss changes before submitting a PR. I do not claim to have all the answers, and you can help determine the future of low-code solutions for Python. I am very interested in your feedback as I don't want to work in a vacuum
294
+ - Alpha status: expect breaking changes between minor versions until 1.0.0
295
+
@@ -0,0 +1,12 @@
1
+ common_python_tasks/__init__.py,sha256=jW_oC6mxA1rYTYICQZK-rwRmFTJRh5xU2KkY-LFff-E,383
2
+ common_python_tasks/data/.coveragerc,sha256=8zXwDq18wIKQV4NtW2NJYpo3nxCO4LsAChyluYTUoJc,85
3
+ common_python_tasks/data/.dockerignore,sha256=d2bDJajUbfSBcfG6WwfFaHZ-MfYKJaqGvLy6YbE5Ie4,76
4
+ common_python_tasks/data/.flake8,sha256=PzH98ak-AKcENaBCzOhI2eHvLxrneklm7KaIWY9g4so,138
5
+ common_python_tasks/data/.isort.cfg,sha256=P_hdco_n95VeXJjlUcXkChqNdct0SNDuk_n9ZaHzXPE,71
6
+ common_python_tasks/data/Containerfile,sha256=RDmwzG75yiaYPDFEs3piRo9Bw1xyRJQ_QS6wdIMGD20,3173
7
+ common_python_tasks/data/pytest.ini,sha256=RHak5QrLzKHvUBjFmaaiOYFrd_8YBRQubXEOT0NJfco,134
8
+ common_python_tasks/tasks.py,sha256=XYieEM_GaJoV8Wv_klQbxMSotYNkuzNB5QoAELH0f9k,25545
9
+ common_python_tasks-0.0.1.dist-info/METADATA,sha256=F0uVqBxJlei1G0fslzMZDDfPV4m7mWPWbfAsomJp6SU,11632
10
+ common_python_tasks-0.0.1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
11
+ common_python_tasks-0.0.1.dist-info/licenses/LICENSE,sha256=Z1X8kRRoTHQFP-gbmycMrLcfNn-2uUGmmIEH0vLbTZk,1070
12
+ common_python_tasks-0.0.1.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: poetry-core 2.2.1
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Joseph Asbury
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.