pi-line 1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,51 @@
1
+ name: Publish to PyPI
2
+
3
+ on:
4
+ push:
5
+ tags:
6
+ - "v*"
7
+
8
+ jobs:
9
+ test:
10
+ name: Run tests
11
+ runs-on: ubuntu-latest
12
+ steps:
13
+ - uses: actions/checkout@v4
14
+ with:
15
+ fetch-depth: 0 # needed for setuptools-scm to read tags
16
+
17
+ - uses: actions/setup-python@v5
18
+ with:
19
+ python-version: "3.12"
20
+
21
+ - name: Install dependencies
22
+ run: pip install -e ".[dev]"
23
+
24
+ - name: Run tests
25
+ run: pytest
26
+
27
+ publish:
28
+ name: Build and publish
29
+ needs: test
30
+ runs-on: ubuntu-latest
31
+ environment: pypi
32
+ permissions:
33
+ id-token: write # required for PyPI Trusted Publisher
34
+
35
+ steps:
36
+ - uses: actions/checkout@v4
37
+ with:
38
+ fetch-depth: 0 # needed for setuptools-scm to read tags
39
+
40
+ - uses: actions/setup-python@v5
41
+ with:
42
+ python-version: "3.12"
43
+
44
+ - name: Install build
45
+ run: pip install build
46
+
47
+ - name: Build package
48
+ run: python -m build
49
+
50
+ - name: Publish to PyPI
51
+ uses: pypa/gh-action-pypi-publish@release/v1
pi_line-1.0/.gitignore ADDED
@@ -0,0 +1,39 @@
1
+ # Python
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+ *.so
6
+ *.egg-info/
7
+ dist/
8
+ build/
9
+ *.egg
10
+
11
+ # Virtual environments
12
+ .venv/
13
+ venv/
14
+ env/
15
+
16
+ # IDE
17
+ .vscode/
18
+ .idea/
19
+ *.swp
20
+ *.swo
21
+
22
+ # Testing / Linting caches
23
+ .pytest_cache/
24
+ .ruff_cache/
25
+ .mypy_cache/
26
+ .coverage
27
+ htmlcov/
28
+
29
+ # piline runtime
30
+ .piline/
31
+
32
+ # OS
33
+ .DS_Store
34
+ Thumbs.db
35
+
36
+ # Environment
37
+ .env
38
+
39
+ generate_docs.py
pi_line-1.0/PKG-INFO ADDED
@@ -0,0 +1,125 @@
1
+ Metadata-Version: 2.4
2
+ Name: pi-line
3
+ Version: 1.0
4
+ Summary: Zero-dependency parallel script runner.
5
+ Author-email: Maximilian Todea <demon.and.max@gmail.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/importt-ant/pi-line
8
+ Project-URL: Repository, https://github.com/importt-ant/pi-line
9
+ Project-URL: Issues, https://github.com/importt-ant/pi-line/issues
10
+ Keywords: python,automation,pipeline,devtools,parallel,task-runner,process-pool,zero-dependencies
11
+ Classifier: Development Status :: 5 - Production/Stable
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Requires-Python: >=3.10
20
+ Description-Content-Type: text/markdown
21
+ Provides-Extra: dev
22
+ Requires-Dist: pytest>=7.4; extra == "dev"
23
+ Requires-Dist: pytest-cov>=4.1; extra == "dev"
24
+ Requires-Dist: ruff>=0.4; extra == "dev"
25
+
26
+ # 🥧〽️❗: pi-line
27
+
28
+ A zero-dependency parallel script runner for Python.
29
+
30
+ Define units of work (**Pi**), execute them in parallel with a **Runner**, or feed them continuously through a **Line**.
31
+
32
+ ## Install
33
+
34
+ ```bash
35
+ pip install pi-line
36
+ ```
37
+
38
+ ## Quick start
39
+
40
+ ### Run scripts in parallel
41
+
42
+ ```python
43
+ from piline import Pi, Runner
44
+
45
+ pis = [
46
+ Pi(name="train", script="train.py", args=["--epochs", "10"]),
47
+ Pi(name="eval", script="eval.py"),
48
+ ]
49
+
50
+ runner = Runner(max_workers=4)
51
+ results = runner.run(pis)
52
+
53
+ for r in results:
54
+ print(f"{r.pi_name}: {'PASS' if r.succeeded else 'FAIL'}")
55
+ ```
56
+
57
+ ### Continuous feed with a Line
58
+
59
+ ```python
60
+ from piline import Pi, Runner, Line
61
+
62
+ runner = Runner(max_workers=4)
63
+
64
+ with Line(runner, max_results=5000) as line:
65
+ line.put(Pi(name="job1", script="job.py"))
66
+ line.put(Pi(name="job2", script="job.py"))
67
+
68
+ # look up a result by id
69
+ result = line.get(pi_id)
70
+
71
+ # flush results to external storage
72
+ batch = line.drain_results()
73
+ # line.stop() called automatically
74
+ ```
75
+
76
+ ### Argument templates
77
+
78
+ Use `{artefact_dir}` and `{task_dir}` placeholders in args — they get resolved to the real paths before execution:
79
+
80
+ ```python
81
+ Pi(
82
+ name="train",
83
+ script="train.py",
84
+ args=["--output", "{artefact_dir}/model.pt"],
85
+ )
86
+ ```
87
+
88
+ ## Concepts
89
+
90
+ - **Pi** — A unit of work. Wraps a script with args, env vars, and an optional timeout. Gets a unique ID on creation. `.py` files run with Python; anything else runs directly.
91
+ - **Result** — Outcome of running a Pi. Exit code, timing, paths to stdout/stderr logs and artefacts.
92
+ - **Runner** — Executes a batch of Pi's in parallel using `ProcessPoolExecutor`.
93
+ - **Line** — Thread-safe queue that feeds Pi's to a Runner in batches. Results stored in a capped dict keyed by `pi_id`. Supports context manager, per-Pi and per-batch callbacks, and `drain_results()` for periodic flushing.
94
+
95
+ ## Output layout
96
+
97
+ ```
98
+ .piline/runs/<pi_name>/<pi_id>/
99
+ stdout.log
100
+ stderr.log
101
+ artefact/
102
+ ```
103
+
104
+ ## Environment variables
105
+
106
+ Scripts receive these env vars automatically:
107
+
108
+ - `PILINE_PI_ID` — The Pi's unique ID
109
+ - `PILINE_PI_NAME` — The Pi's name
110
+ - `PILINE_TASK_DIR` — Path to the task directory
111
+ - `PILINE_ARTEFACT_DIR` — Path to the artefact subdirectory
112
+
113
+ ## Callbacks
114
+
115
+ ```python
116
+ Line(
117
+ runner,
118
+ on_pi_complete=lambda r: print(f"Done: {r.pi_name} ({'PASS' if r.succeeded else 'FAIL'})"),
119
+ on_batch_complete=lambda results: print(f"Batch of {len(results)} finished"),
120
+ )
121
+ ```
122
+
123
+ ## API reference
124
+
125
+ See [docs/index.md](docs/index.md).
pi_line-1.0/README.md ADDED
@@ -0,0 +1,100 @@
1
+ # 🥧〽️❗: pi-line
2
+
3
+ A zero-dependency parallel script runner for Python.
4
+
5
+ Define units of work (**Pi**), execute them in parallel with a **Runner**, or feed them continuously through a **Line**.
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ pip install pi-line
11
+ ```
12
+
13
+ ## Quick start
14
+
15
+ ### Run scripts in parallel
16
+
17
+ ```python
18
+ from piline import Pi, Runner
19
+
20
+ pis = [
21
+ Pi(name="train", script="train.py", args=["--epochs", "10"]),
22
+ Pi(name="eval", script="eval.py"),
23
+ ]
24
+
25
+ runner = Runner(max_workers=4)
26
+ results = runner.run(pis)
27
+
28
+ for r in results:
29
+ print(f"{r.pi_name}: {'PASS' if r.succeeded else 'FAIL'}")
30
+ ```
31
+
32
+ ### Continuous feed with a Line
33
+
34
+ ```python
35
+ from piline import Pi, Runner, Line
36
+
37
+ runner = Runner(max_workers=4)
38
+
39
+ with Line(runner, max_results=5000) as line:
40
+ line.put(Pi(name="job1", script="job.py"))
41
+ line.put(Pi(name="job2", script="job.py"))
42
+
43
+ # look up a result by id
44
+ result = line.get(pi_id)
45
+
46
+ # flush results to external storage
47
+ batch = line.drain_results()
48
+ # line.stop() called automatically
49
+ ```
50
+
51
+ ### Argument templates
52
+
53
+ Use `{artefact_dir}` and `{task_dir}` placeholders in args — they get resolved to the real paths before execution:
54
+
55
+ ```python
56
+ Pi(
57
+ name="train",
58
+ script="train.py",
59
+ args=["--output", "{artefact_dir}/model.pt"],
60
+ )
61
+ ```
62
+
63
+ ## Concepts
64
+
65
+ - **Pi** — A unit of work. Wraps a script with args, env vars, and an optional timeout. Gets a unique ID on creation. `.py` files run with Python; anything else runs directly.
66
+ - **Result** — Outcome of running a Pi. Exit code, timing, paths to stdout/stderr logs and artefacts.
67
+ - **Runner** — Executes a batch of Pi's in parallel using `ProcessPoolExecutor`.
68
+ - **Line** — Thread-safe queue that feeds Pi's to a Runner in batches. Results stored in a capped dict keyed by `pi_id`. Supports context manager, per-Pi and per-batch callbacks, and `drain_results()` for periodic flushing.
69
+
70
+ ## Output layout
71
+
72
+ ```
73
+ .piline/runs/<pi_name>/<pi_id>/
74
+ stdout.log
75
+ stderr.log
76
+ artefact/
77
+ ```
78
+
79
+ ## Environment variables
80
+
81
+ Scripts receive these env vars automatically:
82
+
83
+ - `PILINE_PI_ID` — The Pi's unique ID
84
+ - `PILINE_PI_NAME` — The Pi's name
85
+ - `PILINE_TASK_DIR` — Path to the task directory
86
+ - `PILINE_ARTEFACT_DIR` — Path to the artefact subdirectory
87
+
88
+ ## Callbacks
89
+
90
+ ```python
91
+ Line(
92
+ runner,
93
+ on_pi_complete=lambda r: print(f"Done: {r.pi_name} ({'PASS' if r.succeeded else 'FAIL'})"),
94
+ on_batch_complete=lambda results: print(f"Batch of {len(results)} finished"),
95
+ )
96
+ ```
97
+
98
+ ## API reference
99
+
100
+ See [docs/index.md](docs/index.md).
@@ -0,0 +1,8 @@
1
+ # API Reference
2
+
3
+ | Module | Public API | Description |
4
+ |---|---|---|
5
+ | [pi.md](pi.md) | `Pi`, `Result` | Pi and Result data models. |
6
+ | [runner.md](runner.md) | `Runner` | Parallel Pi executor using ProcessPoolExecutor. |
7
+ | [line.md](line.md) | `Line` | Thread-safe feed queue that dispatches Pi's to a Runner. |
8
+ | [worker.md](worker.md) | `execute_pi` | Single-Pi subprocess execution. |
@@ -0,0 +1,152 @@
1
+ > Thread-safe feed queue that dispatches Pi's to a Runner.
2
+
3
+ ---
4
+
5
+ ## `Line`
6
+
7
+ Thread-safe queue that feeds Pi's to a Runner from a background thread.
8
+
9
+ Put Pi's onto the Line and they get batched and dispatched to the
10
+ Runner in groups of up to `runner.max_workers`. Results are stored
11
+ in an ordered dict keyed by `pi_id`. When the dict exceeds
12
+ *max_results* entries, the oldest results are dropped (FIFO).
13
+
14
+ Use as a context manager to handle start/stop:
15
+
16
+ **Example**
17
+
18
+ ```python
19
+ runner = Runner(max_workers=4)
20
+ with Line(runner, max_results=5000) as line:
21
+ line.put(Pi(name="job1", script="job.py"))
22
+ line.put(Pi(name="job2", script="job.py"))
23
+
24
+ result = line.get(some_pi_id)
25
+ batch = line.drain_results()
26
+ # consumer stops when the with-block exits
27
+ ```
28
+
29
+ **Parameters**
30
+
31
+ | Name | Description |
32
+ |---|---|
33
+ | `runner` | The Runner that executes batches of Pi's. |
34
+ | `maxsize` | Maximum queue depth. `0` (default) means unlimited. If the queue is full, `put` blocks until space is available. |
35
+ | `max_results` | Maximum number of results to keep in memory. Oldest results are evicted when this limit is exceeded. Defaults to 2000. |
36
+ | `on_batch_complete` | Called after each batch finishes, with the list of Results from that batch. |
37
+ | `on_pi_complete` | Called once per finished Pi, with its Result. |
38
+
39
+ ### `size`
40
+
41
+ Number of Pi's waiting in the queue (not yet dispatched).
42
+
43
+ ### `empty`
44
+
45
+ `True` when nothing is queued.
46
+
47
+ ### `total_enqueued`
48
+
49
+ Cumulative count of Pi's added since creation. Keeps incrementing even after Pi's leave the queue, so it works as a progress counter.
50
+
51
+ ### `result_count`
52
+
53
+ Number of results currently stored. May be less than `total_enqueued` if results were drained or evicted.
54
+
55
+ ### `running`
56
+
57
+ `True` while the background consumer thread is alive.
58
+
59
+ ---
60
+
61
+ ### `put(pi: Pi) → str`
62
+
63
+ Add a Pi to the queue.
64
+
65
+ If the consumer is running, the Pi will be picked up and
66
+ executed in the next batch.
67
+
68
+ **Parameters**
69
+
70
+ | Name | Description |
71
+ |---|---|
72
+ | `pi` | The Pi to enqueue. |
73
+
74
+ **Returns**
75
+
76
+ `str` — The Pi's unique ID, for later lookup via `get`.
77
+
78
+ ---
79
+
80
+ ### `put_many(pis: list[Pi]) → list[str]`
81
+
82
+ Add several Pi's to the queue at once.
83
+
84
+ **Parameters**
85
+
86
+ | Name | Description |
87
+ |---|---|
88
+ | `pis` | List of Pi's to enqueue. |
89
+
90
+ **Returns**
91
+
92
+ `list[str]` — IDs of the enqueued Pi's, in the same order as *pis*.
93
+
94
+ ---
95
+
96
+ ### `get(pi_id: str) → Result | None`
97
+
98
+ Look up a result by its Pi's ID.
99
+
100
+ **Parameters**
101
+
102
+ | Name | Description |
103
+ |---|---|
104
+ | `pi_id` | The ID returned by `put`. |
105
+
106
+ **Returns**
107
+
108
+ `Result | None` — The matching Result, or `None` if the Pi hasn't finished yet or has been evicted from the results dict.
109
+
110
+ ---
111
+
112
+ ### `drain_results() → dict[str, Result]`
113
+
114
+ Remove and return all stored results.
115
+
116
+ Useful for periodic flushing to a database or file without
117
+ losing data. After this call, `result_count` is 0.
118
+
119
+ **Returns**
120
+
121
+ `dict[str, Result]` — Mapping of `pi_id` to Result for every result that was stored. Empty dict if nothing was stored.
122
+
123
+ ---
124
+
125
+ ### `start() → None`
126
+
127
+ Start the background consumer thread.
128
+
129
+ The consumer polls the queue, collects batches of up to
130
+ `runner.max_workers` Pi's, and dispatches them to the Runner.
131
+ Call `stop` (or use the context manager) to shut it down.
132
+
133
+ **Raises**
134
+
135
+ | Exception | When |
136
+ |---|---|
137
+ | `RuntimeError` | If the consumer is already running. |
138
+
139
+ ---
140
+
141
+ ### `stop(timeout: float | None = None) → None`
142
+
143
+ Stop the background consumer and wait for it to finish.
144
+
145
+ Blocks until the consumer thread exits. Any batch currently in
146
+ progress will complete before the thread ends.
147
+
148
+ **Parameters**
149
+
150
+ | Name | Description |
151
+ |---|---|
152
+ | `timeout` | Maximum seconds to wait for the thread to join. `None` (default) waits indefinitely. |
pi_line-1.0/docs/pi.md ADDED
@@ -0,0 +1,87 @@
1
+ > Pi and Result data models.
2
+
3
+ ---
4
+
5
+ ## `Pi`
6
+
7
+ A unit of work that wraps a script for parallel execution.
8
+
9
+ Each Pi has a name, a script path, optional arguments and environment
10
+ variables, and an optional timeout in seconds. A unique 12-character
11
+ hex ID is assigned on creation and cannot be overridden.
12
+
13
+ `.py` files run under the current Python interpreter; everything
14
+ else is executed directly (shell scripts, compiled binaries, etc.).
15
+
16
+ **Example**
17
+
18
+ ```python
19
+ pi = Pi(name="train", script="train.py", args=["--lr", "0.01"])
20
+ print(pi.id) # e.g. "a1b2c3d4e5f6"
21
+ ```
22
+
23
+ **Parameters**
24
+
25
+ | Name | Description |
26
+ |---|---|
27
+ | `name` | Human-readable label, also used as the directory name in output. |
28
+ | `script` | Path to the script to run. |
29
+ | `args` | Command-line arguments passed to the script. Supports `{task_dir}` and `{artefact_dir}` placeholders that are resolved to real paths before execution. |
30
+ | `env` | Extra environment variables merged into the subprocess env. |
31
+ | `timeout` | Maximum runtime in seconds. The subprocess is killed if it exceeds this limit. `None` means no limit. |
32
+
33
+ ---
34
+
35
+ ### `resolve_dirs(base_dir: Path | str) → tuple[Path, Path]`
36
+
37
+ Build the task and artefact directory paths for this Pi.
38
+
39
+ Directories are not created here; the worker handles that at
40
+ execution time.
41
+
42
+ **Parameters**
43
+
44
+ | Name | Description |
45
+ |---|---|
46
+ | `base_dir` | Root directory for all output. Converted to `Path` if given as a string. |
47
+
48
+ **Returns**
49
+
50
+ `tuple[Path, Path]` — `(task_dir, artefact_dir)` where `task_dir` is `<base_dir>/<name>/<id>` and `artefact_dir` is `<task_dir>/artefact`.
51
+
52
+ ---
53
+
54
+ ## `Result`
55
+
56
+ Outcome of running a single Pi.
57
+
58
+ Created by the worker after a subprocess finishes (or fails to start).
59
+ Contains the exit code, timing, log paths, and any error information.
60
+
61
+ **Example**
62
+
63
+ ```python
64
+ if result.succeeded:
65
+ print(f"{result.pi_name} passed in {result.duration_s}s")
66
+ else:
67
+ print(result.error_message)
68
+ ```
69
+
70
+ **Parameters**
71
+
72
+ | Name | Description |
73
+ |---|---|
74
+ | `pi_id` | Unique ID of the Pi that produced this result. |
75
+ | `pi_name` | Name of the Pi that produced this result. |
76
+ | `exit_code` | Subprocess return code. `None` when the process could not start or the worker crashed. |
77
+ | `started_at` | UTC timestamp when execution began. |
78
+ | `finished_at` | UTC timestamp when execution ended. |
79
+ | `duration_s` | Wall-clock duration in seconds, rounded to milliseconds. |
80
+ | `task_dir` | Absolute path to the task output directory. |
81
+ | `artefact_dir` | Absolute path to the artefact subdirectory. |
82
+ | `error_message` | Human-readable error for timeouts, launch failures, or worker crashes. `None` on success. |
83
+ | `timed_out` | `True` if the subprocess was killed for exceeding its timeout. |
84
+
85
+ ### `succeeded`
86
+
87
+ `True` when exit_code is 0 and the Pi did not time out.
@@ -0,0 +1,52 @@
1
+ > Parallel Pi executor using ProcessPoolExecutor.
2
+
3
+ ---
4
+
5
+ ## `Runner`
6
+
7
+ Runs a batch of Pi's in parallel using a process pool.
8
+
9
+ Each Pi gets its own output directory under *base_dir*:
10
+ `<base_dir>/<pi_name>/<pi_id>/`, containing `stdout.log`,
11
+ `stderr.log`, and an `artefact/` subdirectory.
12
+
13
+ `results` is reset on every call to `run`.
14
+
15
+ **Example**
16
+
17
+ ```python
18
+ runner = Runner(base_dir="/tmp/runs", max_workers=4)
19
+ results = runner.run([
20
+ Pi(name="train", script="train.py"),
21
+ Pi(name="eval", script="eval.py"),
22
+ ])
23
+ for r in results:
24
+ print(r.pi_name, r.succeeded)
25
+ ```
26
+
27
+ **Parameters**
28
+
29
+ | Name | Description |
30
+ |---|---|
31
+ | `base_dir` | Root directory for task output. Defaults to `.piline/runs`. |
32
+ | `max_workers` | Maximum number of parallel processes. Defaults to `os.cpu_count()`, falling back to 4. |
33
+
34
+ ---
35
+
36
+ ### `run(pis: list[Pi]) → list[Result]`
37
+
38
+ Execute *pis* in parallel and return one Result per Pi.
39
+
40
+ Clears `self.results` before starting. The number of worker
41
+ processes is capped at `min(max_workers, len(pis))` so short
42
+ lists don't spawn idle processes.
43
+
44
+ **Parameters**
45
+
46
+ | Name | Description |
47
+ |---|---|
48
+ | `pis` | Pi's to execute. An empty list returns `[]` immediately. |
49
+
50
+ **Returns**
51
+
52
+ `list[Result]` — One Result per Pi, in completion order (not input order).
@@ -0,0 +1,32 @@
1
+ > Single-Pi subprocess execution.
2
+
3
+ ---
4
+
5
+ ## `execute_pi(pi: Pi, task_dir: Path, artefact_dir: Path) → Result`
6
+
7
+ Run a single Pi as a subprocess and return the outcome.
8
+
9
+ Creates *task_dir* and *artefact_dir* if they don't exist, writes
10
+ stdout and stderr to log files inside *task_dir*, and populates a
11
+ `Result` with exit code, timing, and error details.
12
+
13
+ The subprocess receives four extra environment variables:
14
+ `PILINE_PI_ID`, `PILINE_PI_NAME`, `PILINE_TASK_DIR`, and
15
+ `PILINE_ARTEFACT_DIR`, plus any vars from `pi.env`.
16
+ `{task_dir}` and `{artefact_dir}` placeholders in `pi.args`
17
+ are resolved to real paths before the command is built.
18
+
19
+ Meant to run inside a `ProcessPoolExecutor` via `Runner`,
20
+ but works fine standalone.
21
+
22
+ **Parameters**
23
+
24
+ | Name | Description |
25
+ |---|---|
26
+ | `pi` | The Pi to execute. |
27
+ | `task_dir` | Directory for stdout/stderr logs. Created if missing. |
28
+ | `artefact_dir` | Directory the script can write output files to. Passed to the subprocess via `PILINE_ARTEFACT_DIR`. Created if missing. |
29
+
30
+ **Returns**
31
+
32
+ `Result` — Contains exit code, timing, log paths, and any error or timeout information.
@@ -0,0 +1,55 @@
1
+ [build-system]
2
+ requires = ["setuptools>=68.0", "setuptools-scm>=8.0"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "pi-line"
7
+ dynamic = ["version"]
8
+ description = "Zero-dependency parallel script runner."
9
+ readme = "README.md"
10
+ license = {text = "MIT"}
11
+ requires-python = ">=3.10"
12
+ authors = [
13
+ {name = "Maximilian Todea", email = "demon.and.max@gmail.com"},
14
+ ]
15
+ keywords = ["python", "automation", "pipeline", "devtools", "parallel", "task-runner", "process-pool", "zero-dependencies"]
16
+ classifiers = [
17
+ "Development Status :: 5 - Production/Stable",
18
+ "Intended Audience :: Developers",
19
+ "License :: OSI Approved :: MIT License",
20
+ "Programming Language :: Python :: 3",
21
+ "Programming Language :: Python :: 3.10",
22
+ "Programming Language :: Python :: 3.11",
23
+ "Programming Language :: Python :: 3.12",
24
+ "Programming Language :: Python :: 3.13",
25
+ ]
26
+ dependencies = []
27
+
28
+ [project.optional-dependencies]
29
+ dev = [
30
+ "pytest>=7.4",
31
+ "pytest-cov>=4.1",
32
+ "ruff>=0.4",
33
+ ]
34
+
35
+ [project.urls]
36
+ Homepage = "https://github.com/importt-ant/pi-line"
37
+ Repository = "https://github.com/importt-ant/pi-line"
38
+ Issues = "https://github.com/importt-ant/pi-line/issues"
39
+
40
+ [tool.setuptools_scm]
41
+ # version is read from git tags (e.g. v1.0 → 1.0)
42
+
43
+ [tool.setuptools.packages.find]
44
+ where = ["src"]
45
+ include = ["piline*"]
46
+
47
+ [tool.pytest.ini_options]
48
+ testpaths = ["tests"]
49
+
50
+ [tool.ruff]
51
+ target-version = "py310"
52
+ line-length = 100
53
+
54
+ [tool.ruff.lint]
55
+ select = ["E", "F", "I", "N", "W", "UP"]