supypowers 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- supypowers-0.1.0/.env +2 -0
- supypowers-0.1.0/.github/workflows/ci.yml +33 -0
- supypowers-0.1.0/.github/workflows/publish.yml +43 -0
- supypowers-0.1.0/.gitignore +7 -0
- supypowers-0.1.0/LICENSE +22 -0
- supypowers-0.1.0/PKG-INFO +108 -0
- supypowers-0.1.0/README.md +63 -0
- supypowers-0.1.0/examples/__init__.py +0 -0
- supypowers-0.1.0/examples/dates.py +38 -0
- supypowers-0.1.0/examples/exponents.py +42 -0
- supypowers-0.1.0/examples/misc.py +19 -0
- supypowers-0.1.0/examples/strings.py +37 -0
- supypowers-0.1.0/project.md +34 -0
- supypowers-0.1.0/pyproject.toml +38 -0
- supypowers-0.1.0/src/supypowers/__init__.py +4 -0
- supypowers-0.1.0/src/supypowers/cli.py +402 -0
- supypowers-0.1.0/src/supypowers/util.py +59 -0
- supypowers-0.1.0/src/supypowers/uv_exec.py +66 -0
- supypowers-0.1.0/src/supypowers/uv_script_metadata.py +77 -0
- supypowers-0.1.0/tests/test_cli.py +95 -0
- supypowers-0.1.0/uv.lock +7 -0
supypowers-0.1.0/.env
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
name: CI
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
pull_request:
|
|
6
|
+
|
|
7
|
+
jobs:
|
|
8
|
+
test:
|
|
9
|
+
runs-on: ubuntu-latest
|
|
10
|
+
steps:
|
|
11
|
+
- uses: actions/checkout@v4
|
|
12
|
+
|
|
13
|
+
- name: Install uv
|
|
14
|
+
uses: astral-sh/setup-uv@v3
|
|
15
|
+
|
|
16
|
+
- name: Run tests
|
|
17
|
+
run: uv run python -m unittest discover -s tests -p "test_*.py" -q
|
|
18
|
+
|
|
19
|
+
build:
|
|
20
|
+
runs-on: ubuntu-latest
|
|
21
|
+
steps:
|
|
22
|
+
- uses: actions/checkout@v4
|
|
23
|
+
|
|
24
|
+
- name: Set up Python
|
|
25
|
+
uses: actions/setup-python@v5
|
|
26
|
+
with:
|
|
27
|
+
python-version: "3.12"
|
|
28
|
+
|
|
29
|
+
- name: Build sdist+wheel
|
|
30
|
+
run: |
|
|
31
|
+
python -m pip install --upgrade pip build
|
|
32
|
+
python -m build
|
|
33
|
+
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
name: Publish to PyPI
|
|
2
|
+
|
|
3
|
+
on:
|
|
4
|
+
push:
|
|
5
|
+
tags:
|
|
6
|
+
- "v*"
|
|
7
|
+
|
|
8
|
+
permissions:
|
|
9
|
+
contents: read
|
|
10
|
+
id-token: write
|
|
11
|
+
|
|
12
|
+
jobs:
|
|
13
|
+
build:
|
|
14
|
+
runs-on: ubuntu-latest
|
|
15
|
+
steps:
|
|
16
|
+
- uses: actions/checkout@v4
|
|
17
|
+
- uses: actions/setup-python@v5
|
|
18
|
+
with:
|
|
19
|
+
python-version: "3.12"
|
|
20
|
+
- name: Build
|
|
21
|
+
run: |
|
|
22
|
+
python -m pip install --upgrade pip build
|
|
23
|
+
python -m build
|
|
24
|
+
- name: Upload artifact
|
|
25
|
+
uses: actions/upload-artifact@v4
|
|
26
|
+
with:
|
|
27
|
+
name: dist
|
|
28
|
+
path: dist/*
|
|
29
|
+
|
|
30
|
+
publish:
|
|
31
|
+
runs-on: ubuntu-latest
|
|
32
|
+
needs: build
|
|
33
|
+
environment:
|
|
34
|
+
name: pypi
|
|
35
|
+
url: https://pypi.org/p/supypowers
|
|
36
|
+
steps:
|
|
37
|
+
- uses: actions/download-artifact@v4
|
|
38
|
+
with:
|
|
39
|
+
name: dist
|
|
40
|
+
path: dist
|
|
41
|
+
- name: Publish to PyPI (trusted publishing)
|
|
42
|
+
uses: pypa/gh-action-pypi-publish@release/v1
|
|
43
|
+
|
supypowers-0.1.0/LICENSE
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Andre
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
22
|
+
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: supypowers
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Run self-contained uv Python scripts as callable, schema-documented 'superpowers'.
|
|
5
|
+
Project-URL: Homepage, https://github.com/ergodic-ai/supypowers
|
|
6
|
+
Project-URL: Repository, https://github.com/ergodic-ai/supypowers
|
|
7
|
+
Author: Andre
|
|
8
|
+
License: MIT License
|
|
9
|
+
|
|
10
|
+
Copyright (c) 2026 Andre
|
|
11
|
+
|
|
12
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
13
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
14
|
+
in the Software without restriction, including without limitation the rights
|
|
15
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
16
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
17
|
+
furnished to do so, subject to the following conditions:
|
|
18
|
+
|
|
19
|
+
The above copyright notice and this permission notice shall be included in all
|
|
20
|
+
copies or substantial portions of the Software.
|
|
21
|
+
|
|
22
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
23
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
24
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
25
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
26
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
27
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
28
|
+
SOFTWARE.
|
|
29
|
+
|
|
30
|
+
License-File: LICENSE
|
|
31
|
+
Keywords: automation,cli,llm,pydantic,tools,uv
|
|
32
|
+
Classifier: Development Status :: 3 - Alpha
|
|
33
|
+
Classifier: Intended Audience :: Developers
|
|
34
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
35
|
+
Classifier: Programming Language :: Python :: 3
|
|
36
|
+
Classifier: Programming Language :: Python :: 3 :: Only
|
|
37
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
38
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
39
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
40
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
41
|
+
Classifier: Topic :: Software Development :: Libraries
|
|
42
|
+
Classifier: Topic :: Utilities
|
|
43
|
+
Requires-Python: >=3.10
|
|
44
|
+
Description-Content-Type: text/markdown
|
|
45
|
+
|
|
46
|
+
# supypowers
|
|
47
|
+
|
|
48
|
+
Run self-contained Python scripts (with `uv` script dependencies) as callable, schema-documented functions.
|
|
49
|
+
|
|
50
|
+
## Install
|
|
51
|
+
|
|
52
|
+
```bash
|
|
53
|
+
pip install supypowers
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
Note: the CLI expects [`uv`](https://github.com/astral-sh/uv) to be installed on your machine (it shells out to `uv run`).
|
|
57
|
+
|
|
58
|
+
## Release to PyPI (recommended: GitHub Actions)
|
|
59
|
+
|
|
60
|
+
- Create a PyPI project using **Trusted Publishing** for your GitHub repo.
|
|
61
|
+
- Tag a release:
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
git tag v0.1.0
|
|
65
|
+
git push --tags
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
The workflow in `.github/workflows/publish.yml` will build and publish `dist/*` to PyPI.
|
|
69
|
+
|
|
70
|
+
## Run (no venv)
|
|
71
|
+
|
|
72
|
+
From this repo root:
|
|
73
|
+
|
|
74
|
+
```bash
|
|
75
|
+
uv run supypowers examples docs
|
|
76
|
+
uv run supypowers examples run exponents:compute_sqrt \"{'x': 9}\" --secrets .env --secrets API_KEY=abc
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
From anywhere:
|
|
80
|
+
|
|
81
|
+
```bash
|
|
82
|
+
uv run --project /Users/andre/ergodic/superpowers supypowers examples docs
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
## Install `supypowers` on your PATH (so you can run `supypowers ...`)
|
|
86
|
+
|
|
87
|
+
Use `uv tool install`:
|
|
88
|
+
|
|
89
|
+
```bash
|
|
90
|
+
cd /Users/andre/ergodic/superpowers
|
|
91
|
+
uv tool install --editable .
|
|
92
|
+
uv tool update-shell
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
Then open a new shell (or reload your shell config) and run:
|
|
96
|
+
|
|
97
|
+
```bash
|
|
98
|
+
supypowers examples docs
|
|
99
|
+
supypowers examples run exponents:compute_sqrt \"{'x': 9}\"
|
|
100
|
+
```
|
|
101
|
+
|
|
102
|
+
## Example scripts
|
|
103
|
+
|
|
104
|
+
See `examples/` (uses `uv` script metadata + Pydantic models).
|
|
105
|
+
|
|
106
|
+
## Notes
|
|
107
|
+
|
|
108
|
+
- The CLI executes target scripts **only** via `uv run <script.py> ...` (no imports into the CLI process).
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
# supypowers
|
|
2
|
+
|
|
3
|
+
Run self-contained Python scripts (with `uv` script dependencies) as callable, schema-documented functions.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pip install supypowers
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
Note: the CLI expects [`uv`](https://github.com/astral-sh/uv) to be installed on your machine (it shells out to `uv run`).
|
|
12
|
+
|
|
13
|
+
## Release to PyPI (recommended: GitHub Actions)
|
|
14
|
+
|
|
15
|
+
- Create a PyPI project using **Trusted Publishing** for your GitHub repo.
|
|
16
|
+
- Tag a release:
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
git tag v0.1.0
|
|
20
|
+
git push --tags
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
The workflow in `.github/workflows/publish.yml` will build and publish `dist/*` to PyPI.
|
|
24
|
+
|
|
25
|
+
## Run (no venv)
|
|
26
|
+
|
|
27
|
+
From this repo root:
|
|
28
|
+
|
|
29
|
+
```bash
|
|
30
|
+
uv run supypowers examples docs
|
|
31
|
+
uv run supypowers examples run exponents:compute_sqrt \"{'x': 9}\" --secrets .env --secrets API_KEY=abc
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
From anywhere:
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
uv run --project /Users/andre/ergodic/superpowers supypowers examples docs
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
## Install `supypowers` on your PATH (so you can run `supypowers ...`)
|
|
41
|
+
|
|
42
|
+
Use `uv tool install`:
|
|
43
|
+
|
|
44
|
+
```bash
|
|
45
|
+
cd /Users/andre/ergodic/superpowers
|
|
46
|
+
uv tool install --editable .
|
|
47
|
+
uv tool update-shell
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
Then open a new shell (or reload your shell config) and run:
|
|
51
|
+
|
|
52
|
+
```bash
|
|
53
|
+
supypowers examples docs
|
|
54
|
+
supypowers examples run exponents:compute_sqrt \"{'x': 9}\"
|
|
55
|
+
```
|
|
56
|
+
|
|
57
|
+
## Example scripts
|
|
58
|
+
|
|
59
|
+
See `examples/` (uses `uv` script metadata + Pydantic models).
|
|
60
|
+
|
|
61
|
+
## Notes
|
|
62
|
+
|
|
63
|
+
- The CLI executes target scripts **only** via `uv run <script.py> ...` (no imports into the CLI process).
|
|
File without changes
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
# /// script
|
|
2
|
+
# dependencies = [
|
|
3
|
+
# "pydantic",
|
|
4
|
+
# ]
|
|
5
|
+
# ///
|
|
6
|
+
|
|
7
|
+
from datetime import date, timedelta
|
|
8
|
+
|
|
9
|
+
from pydantic import BaseModel, Field
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class AddDaysInput(BaseModel):
|
|
13
|
+
d: date = Field(..., description="Date (YYYY-MM-DD).")
|
|
14
|
+
days: int = Field(..., description="Days to add (can be negative).")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class AddDaysOutput(BaseModel):
|
|
18
|
+
result: date = Field(..., description="Resulting date (YYYY-MM-DD).")
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def add_days(input: AddDaysInput) -> AddDaysOutput:
|
|
22
|
+
"""Add (or subtract) a number of days from a date."""
|
|
23
|
+
return AddDaysOutput(result=input.d + timedelta(days=input.days))
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class DaysBetweenInput(BaseModel):
|
|
27
|
+
start: date = Field(..., description="Start date (YYYY-MM-DD).")
|
|
28
|
+
end: date = Field(..., description="End date (YYYY-MM-DD).")
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class DaysBetweenOutput(BaseModel):
|
|
32
|
+
days: int = Field(..., description="Number of days between end and start.")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def days_between(input: DaysBetweenInput) -> DaysBetweenOutput:
|
|
36
|
+
"""Compute the day delta between two dates (end - start)."""
|
|
37
|
+
return DaysBetweenOutput(days=(input.end - input.start).days)
|
|
38
|
+
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
# /// script
|
|
2
|
+
# dependencies = [
|
|
3
|
+
# "pydantic",
|
|
4
|
+
# ]
|
|
5
|
+
# ///
|
|
6
|
+
|
|
7
|
+
import sys
|
|
8
|
+
from math import sqrt
|
|
9
|
+
from pydantic import BaseModel, Field
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ComputeSqrtInput(BaseModel):
|
|
13
|
+
x: float = Field(..., description="The number to compute the square root of.")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ComputeSqrtOutput(BaseModel):
|
|
17
|
+
result: float = Field(..., description="The square root of the number.")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def compute_sqrt(input: ComputeSqrtInput) -> ComputeSqrtOutput:
|
|
21
|
+
"""This function computes the square root of a number."""
|
|
22
|
+
return ComputeSqrtOutput(result=sqrt(input.x))
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class ComputeDifferentPowerInput(BaseModel):
|
|
26
|
+
x: float = Field(..., description="The number to compute the different powers of.")
|
|
27
|
+
n: int = Field(..., description="The power to compute.")
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class ComputeDifferentPowerOutput(BaseModel):
|
|
31
|
+
result: float = Field(..., description="The different powers of the number.")
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def compute_different_power(
|
|
35
|
+
input: ComputeDifferentPowerInput,
|
|
36
|
+
) -> ComputeDifferentPowerOutput:
|
|
37
|
+
"""This function computes the different powers of a number."""
|
|
38
|
+
return ComputeDifferentPowerOutput(result=input.x**input.n)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
if __name__ == "__main__":
|
|
42
|
+
print(sys.argv)
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
# /// script
|
|
2
|
+
# dependencies = [
|
|
3
|
+
# "pydantic",
|
|
4
|
+
# ]
|
|
5
|
+
# ///
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from pydantic import BaseModel, Field
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class EchoInput(BaseModel):
|
|
13
|
+
message: str = Field(..., description="Message to echo back.")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def echo(input: EchoInput) -> str:
|
|
17
|
+
"""Return a plain string (non-Pydantic output)."""
|
|
18
|
+
return input.message
|
|
19
|
+
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# /// script
|
|
2
|
+
# dependencies = [
|
|
3
|
+
# "pydantic",
|
|
4
|
+
# ]
|
|
5
|
+
# ///
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
from pydantic import BaseModel, Field
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class ReverseStringInput(BaseModel):
|
|
13
|
+
s: str = Field(..., description="String to reverse.")
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ReverseStringOutput(BaseModel):
|
|
17
|
+
result: str = Field(..., description="The reversed string.")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def reverse_string(input: ReverseStringInput) -> ReverseStringOutput:
|
|
21
|
+
"""Reverse a string."""
|
|
22
|
+
return ReverseStringOutput(result=input.s[::-1])
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class CountVowelsInput(BaseModel):
|
|
26
|
+
s: str = Field(..., description="String to count vowels in.")
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class CountVowelsOutput(BaseModel):
|
|
30
|
+
count: int = Field(..., description="Number of vowels in the string.")
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def count_vowels(input: CountVowelsInput) -> CountVowelsOutput:
|
|
34
|
+
"""Count vowels in a string."""
|
|
35
|
+
vowels = set("aeiouAEIOU")
|
|
36
|
+
return CountVowelsOutput(count=sum(1 for ch in input.s if ch in vowels))
|
|
37
|
+
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
# Supypowers
|
|
2
|
+
|
|
3
|
+
1. What is a superpower?
|
|
4
|
+
|
|
5
|
+
Supypowers are essentially tools for LLMs, we want to create a library of functions that:
|
|
6
|
+
|
|
7
|
+
- are self contained within a python script
|
|
8
|
+
- can be executed using a CLI
|
|
9
|
+
- don't require a server to be open
|
|
10
|
+
|
|
11
|
+
A superpower is a python script that has the following properties:
|
|
12
|
+
|
|
13
|
+
- One (or more functions) that accept a documented set of inputs (validated using a serialization library such as Pydantic)
|
|
14
|
+
- Validated and documented set of outputs
|
|
15
|
+
- It can be run without any venv being defined using uv's dependencies
|
|
16
|
+
|
|
17
|
+
for example, in examples/exponents.py we have an example of how I want these functions to look like.
|
|
18
|
+
|
|
19
|
+
2. What do we need to build?
|
|
20
|
+
|
|
21
|
+
a) a CLI like
|
|
22
|
+
|
|
23
|
+
`supypowers <folder_name> run <script>:<function> <input_json> --secrets <env_file_or_secrets>`
|
|
24
|
+
|
|
25
|
+
`supypowers examples run exponents:compute_sqrt "{a:1}" ` which executes the function in the script. It should use uv's dependencies using uv's scripts framework
|
|
26
|
+
|
|
27
|
+
b) a way to get the documentation out of every function that's accessible within the folder.. essentially I want a context to pass to the LLM. Each function should come with:
|
|
28
|
+
|
|
29
|
+
- function name
|
|
30
|
+
- function description
|
|
31
|
+
- input schema
|
|
32
|
+
- (optional) output schema or any
|
|
33
|
+
|
|
34
|
+
b) possible decorators that need to go into the scripts to make life simpler (like defining requirements, defining documentations, etc)
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "supypowers"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "Run self-contained uv Python scripts as callable, schema-documented 'superpowers'."
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
requires-python = ">=3.10"
|
|
7
|
+
license = { file = "LICENSE" }
|
|
8
|
+
authors = [{ name = "Andre", email = "" }]
|
|
9
|
+
keywords = ["uv", "cli", "pydantic", "llm", "tools", "automation"]
|
|
10
|
+
classifiers = [
|
|
11
|
+
"Development Status :: 3 - Alpha",
|
|
12
|
+
"Intended Audience :: Developers",
|
|
13
|
+
"License :: OSI Approved :: MIT License",
|
|
14
|
+
"Programming Language :: Python :: 3",
|
|
15
|
+
"Programming Language :: Python :: 3 :: Only",
|
|
16
|
+
"Programming Language :: Python :: 3.10",
|
|
17
|
+
"Programming Language :: Python :: 3.11",
|
|
18
|
+
"Programming Language :: Python :: 3.12",
|
|
19
|
+
"Programming Language :: Python :: 3.13",
|
|
20
|
+
"Topic :: Software Development :: Libraries",
|
|
21
|
+
"Topic :: Utilities",
|
|
22
|
+
]
|
|
23
|
+
dependencies = []
|
|
24
|
+
|
|
25
|
+
[project.urls]
|
|
26
|
+
Homepage = "https://github.com/ergodic-ai/supypowers"
|
|
27
|
+
Repository = "https://github.com/ergodic-ai/supypowers"
|
|
28
|
+
|
|
29
|
+
[project.scripts]
|
|
30
|
+
supypowers = "supypowers.cli:app"
|
|
31
|
+
|
|
32
|
+
[build-system]
|
|
33
|
+
requires = ["hatchling>=1.24.2"]
|
|
34
|
+
build-backend = "hatchling.build"
|
|
35
|
+
|
|
36
|
+
[tool.hatch.build.targets.wheel]
|
|
37
|
+
packages = ["src/supypowers"]
|
|
38
|
+
|
|
@@ -0,0 +1,402 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import json
|
|
5
|
+
import sys
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from supypowers.uv_exec import UVRunError, uv_run_python_code
|
|
9
|
+
from supypowers.util import parse_secrets_args, resolve_script_path
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def app() -> None:
|
|
13
|
+
parser = argparse.ArgumentParser(prog="supypowers")
|
|
14
|
+
parser.add_argument("folder", type=Path, help="Folder containing scripts")
|
|
15
|
+
sub = parser.add_subparsers(dest="command", required=True)
|
|
16
|
+
|
|
17
|
+
run_p = sub.add_parser("run", help="Run a function in a script via `uv run`")
|
|
18
|
+
run_p.add_argument("target", type=str, help="script:function (script may omit .py)")
|
|
19
|
+
run_p.add_argument("input_data", type=str, help="Input data (JSON or Python-literal-ish)")
|
|
20
|
+
run_p.add_argument(
|
|
21
|
+
"--secrets",
|
|
22
|
+
action="append",
|
|
23
|
+
default=[],
|
|
24
|
+
help="Secrets as a .env path or inline KEY=VAL. May be provided multiple times.",
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
docs_p = sub.add_parser("docs", help="Emit docs JSON or Markdown for discovered functions")
|
|
28
|
+
docs_p.add_argument("--recursive", action="store_true", help="Recurse into subfolders")
|
|
29
|
+
docs_p.add_argument(
|
|
30
|
+
"--format",
|
|
31
|
+
choices=["json", "md"],
|
|
32
|
+
default="json",
|
|
33
|
+
help="Output format (json or md).",
|
|
34
|
+
)
|
|
35
|
+
docs_p.add_argument(
|
|
36
|
+
"--output",
|
|
37
|
+
type=Path,
|
|
38
|
+
default=None,
|
|
39
|
+
help="Write output to a file instead of stdout.",
|
|
40
|
+
)
|
|
41
|
+
docs_p.add_argument(
|
|
42
|
+
"--require-marker",
|
|
43
|
+
action="store_true",
|
|
44
|
+
help="Only include functions explicitly marked (currently: decorator named `superpower`).",
|
|
45
|
+
)
|
|
46
|
+
docs_p.add_argument(
|
|
47
|
+
"--secrets",
|
|
48
|
+
action="append",
|
|
49
|
+
default=[],
|
|
50
|
+
help="Secrets as a .env path or inline KEY=VAL. May be provided multiple times.",
|
|
51
|
+
)
|
|
52
|
+
|
|
53
|
+
args = parser.parse_args()
|
|
54
|
+
|
|
55
|
+
if args.command == "run":
|
|
56
|
+
_cmd_run(args.folder, args.target, args.input_data, args.secrets)
|
|
57
|
+
return
|
|
58
|
+
if args.command == "docs":
|
|
59
|
+
_cmd_docs(
|
|
60
|
+
args.folder,
|
|
61
|
+
args.recursive,
|
|
62
|
+
args.require_marker,
|
|
63
|
+
args.secrets,
|
|
64
|
+
args.format,
|
|
65
|
+
args.output,
|
|
66
|
+
)
|
|
67
|
+
return
|
|
68
|
+
|
|
69
|
+
parser.error("unknown command")
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _cmd_run(folder: Path, target: str, input_data: str, secrets: list[str]) -> None:
|
|
73
|
+
if not folder.exists() or not folder.is_dir():
|
|
74
|
+
print(json.dumps({"ok": False, "error": f"folder not found: {folder}"}))
|
|
75
|
+
raise SystemExit(2)
|
|
76
|
+
|
|
77
|
+
script_name, _, func_name = target.partition(":")
|
|
78
|
+
if not script_name or not func_name:
|
|
79
|
+
print(json.dumps({"ok": False, "error": "target must be in the form script:function"}))
|
|
80
|
+
raise SystemExit(2)
|
|
81
|
+
|
|
82
|
+
script_path = resolve_script_path(folder, script_name)
|
|
83
|
+
env = parse_secrets_args(secrets or [])
|
|
84
|
+
|
|
85
|
+
payload = {
|
|
86
|
+
"script_path": str(script_path),
|
|
87
|
+
"function_name": func_name,
|
|
88
|
+
"input_data": input_data,
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
try:
|
|
92
|
+
out = uv_run_python_code(
|
|
93
|
+
script_path=script_path,
|
|
94
|
+
code=_RUNNER_CODE,
|
|
95
|
+
payload=payload,
|
|
96
|
+
extra_env=env,
|
|
97
|
+
)
|
|
98
|
+
except UVRunError as e:
|
|
99
|
+
if e.stderr:
|
|
100
|
+
sys.stderr.write(e.stderr + "\n")
|
|
101
|
+
print(
|
|
102
|
+
json.dumps(
|
|
103
|
+
{
|
|
104
|
+
"ok": False,
|
|
105
|
+
"error": e.message,
|
|
106
|
+
"exit_code": e.exit_code,
|
|
107
|
+
"uv_stdout": e.stdout,
|
|
108
|
+
"uv_stderr": e.stderr,
|
|
109
|
+
}
|
|
110
|
+
)
|
|
111
|
+
)
|
|
112
|
+
raise SystemExit(e.exit_code)
|
|
113
|
+
|
|
114
|
+
try:
|
|
115
|
+
parsed = json.loads(out)
|
|
116
|
+
except Exception:
|
|
117
|
+
print(json.dumps({"ok": False, "error": "runner did not emit valid JSON", "raw": out}))
|
|
118
|
+
raise SystemExit(1)
|
|
119
|
+
|
|
120
|
+
print(json.dumps(parsed, ensure_ascii=False))
|
|
121
|
+
raise SystemExit(0 if parsed.get("ok") else 1)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def _cmd_docs(
|
|
125
|
+
folder: Path,
|
|
126
|
+
recursive: bool,
|
|
127
|
+
require_marker: bool,
|
|
128
|
+
secrets: list[str],
|
|
129
|
+
out_format: str,
|
|
130
|
+
output_path: Path | None,
|
|
131
|
+
) -> None:
|
|
132
|
+
if not folder.exists() or not folder.is_dir():
|
|
133
|
+
print(json.dumps({"ok": False, "error": f"folder not found: {folder}"}))
|
|
134
|
+
raise SystemExit(2)
|
|
135
|
+
|
|
136
|
+
env = parse_secrets_args(secrets or [])
|
|
137
|
+
|
|
138
|
+
scripts = (
|
|
139
|
+
sorted(p for p in folder.rglob("*.py") if p.is_file())
|
|
140
|
+
if recursive
|
|
141
|
+
else sorted(p for p in folder.glob("*.py") if p.is_file())
|
|
142
|
+
)
|
|
143
|
+
|
|
144
|
+
docs_out: list[dict] = []
|
|
145
|
+
for script_path in scripts:
|
|
146
|
+
payload = {"script_path": str(script_path), "require_marker": require_marker}
|
|
147
|
+
try:
|
|
148
|
+
out = uv_run_python_code(
|
|
149
|
+
script_path=script_path,
|
|
150
|
+
code=_DOCS_CODE,
|
|
151
|
+
payload=payload,
|
|
152
|
+
extra_env=env,
|
|
153
|
+
)
|
|
154
|
+
docs_out.append(json.loads(out))
|
|
155
|
+
except Exception as e:
|
|
156
|
+
docs_out.append({"script": str(script_path), "error": str(e), "functions": []})
|
|
157
|
+
|
|
158
|
+
if out_format == "json":
|
|
159
|
+
rendered = json.dumps(docs_out, ensure_ascii=False)
|
|
160
|
+
else:
|
|
161
|
+
rendered = _docs_to_markdown(docs_out)
|
|
162
|
+
|
|
163
|
+
if output_path is not None:
|
|
164
|
+
output_path.write_text(rendered + "\n", encoding="utf-8")
|
|
165
|
+
else:
|
|
166
|
+
print(rendered)
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def _docs_to_markdown(docs_out: list[dict]) -> str:
|
|
170
|
+
lines: list[str] = []
|
|
171
|
+
lines.append("## Supypowers\n")
|
|
172
|
+
for item in docs_out:
|
|
173
|
+
script = item.get("script", "")
|
|
174
|
+
err = item.get("error")
|
|
175
|
+
lines.append(f"### `{script}`\n")
|
|
176
|
+
if err:
|
|
177
|
+
lines.append(f"**Error:** `{err}`\n")
|
|
178
|
+
continue
|
|
179
|
+
fns = item.get("functions") or []
|
|
180
|
+
if not fns:
|
|
181
|
+
lines.append("_No supypowers found._\n")
|
|
182
|
+
continue
|
|
183
|
+
for fn in fns:
|
|
184
|
+
name = fn.get("name", "")
|
|
185
|
+
desc = (fn.get("description") or "").strip()
|
|
186
|
+
lines.append(f"#### `{name}`\n")
|
|
187
|
+
if desc:
|
|
188
|
+
lines.append(desc + "\n")
|
|
189
|
+
in_schema = fn.get("input_schema")
|
|
190
|
+
out_schema = fn.get("output_schema")
|
|
191
|
+
lines.append("**Input schema**\n")
|
|
192
|
+
lines.append("```json")
|
|
193
|
+
lines.append(json.dumps(in_schema, ensure_ascii=False, indent=2))
|
|
194
|
+
lines.append("```\n")
|
|
195
|
+
lines.append("**Output schema**\n")
|
|
196
|
+
lines.append("```json")
|
|
197
|
+
lines.append(json.dumps(out_schema, ensure_ascii=False, indent=2))
|
|
198
|
+
lines.append("```\n")
|
|
199
|
+
return "\n".join(lines).rstrip() + "\n"
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
_RUNNER_CODE = r"""
|
|
203
|
+
import ast
|
|
204
|
+
import importlib.util
|
|
205
|
+
import inspect
|
|
206
|
+
import json
|
|
207
|
+
import sys
|
|
208
|
+
import typing
|
|
209
|
+
|
|
210
|
+
def _parse_input(s):
|
|
211
|
+
s = s.strip()
|
|
212
|
+
try:
|
|
213
|
+
return json.loads(s)
|
|
214
|
+
except Exception:
|
|
215
|
+
pass
|
|
216
|
+
# "YAML-ish" best-effort: accept Python literals (e.g. {'x': 1}, [1,2], True, None).
|
|
217
|
+
try:
|
|
218
|
+
return ast.literal_eval(s)
|
|
219
|
+
except Exception:
|
|
220
|
+
raise ValueError("input_data must be valid JSON or a Python-literal-ish value")
|
|
221
|
+
|
|
222
|
+
def _load_module_from_path(path):
|
|
223
|
+
spec = importlib.util.spec_from_file_location("__supypowers_target__", path)
|
|
224
|
+
mod = importlib.util.module_from_spec(spec)
|
|
225
|
+
spec.loader.exec_module(mod)
|
|
226
|
+
return mod
|
|
227
|
+
|
|
228
|
+
def _is_pydantic_model(cls):
|
|
229
|
+
try:
|
|
230
|
+
from pydantic import BaseModel
|
|
231
|
+
return isinstance(cls, type) and issubclass(cls, BaseModel)
|
|
232
|
+
except Exception:
|
|
233
|
+
return False
|
|
234
|
+
|
|
235
|
+
def _resolved_type_hints(fn, mod):
|
|
236
|
+
try:
|
|
237
|
+
return typing.get_type_hints(fn, globalns=vars(mod), localns=vars(mod))
|
|
238
|
+
except Exception:
|
|
239
|
+
return {}
|
|
240
|
+
|
|
241
|
+
def _model_to_jsonable(obj):
|
|
242
|
+
# Pydantic v2: model_dump(); v1: dict()
|
|
243
|
+
if hasattr(obj, "model_dump"):
|
|
244
|
+
try:
|
|
245
|
+
return obj.model_dump(mode="json")
|
|
246
|
+
except Exception:
|
|
247
|
+
return obj.model_dump()
|
|
248
|
+
if hasattr(obj, "dict"):
|
|
249
|
+
return obj.dict()
|
|
250
|
+
return obj
|
|
251
|
+
|
|
252
|
+
def main():
|
|
253
|
+
payload = json.loads(sys.stdin.read())
|
|
254
|
+
script_path = payload["script_path"]
|
|
255
|
+
fn_name = payload["function_name"]
|
|
256
|
+
input_data = payload["input_data"]
|
|
257
|
+
|
|
258
|
+
mod = _load_module_from_path(script_path)
|
|
259
|
+
fn = getattr(mod, fn_name, None)
|
|
260
|
+
if fn is None or not callable(fn):
|
|
261
|
+
print(json.dumps({"ok": False, "error": f"function not found: {fn_name}"}))
|
|
262
|
+
return 2
|
|
263
|
+
|
|
264
|
+
sig = inspect.signature(fn)
|
|
265
|
+
params = list(sig.parameters.values())
|
|
266
|
+
if len(params) != 1:
|
|
267
|
+
print(json.dumps({"ok": False, "error": "function must accept exactly one parameter named `input`"}))
|
|
268
|
+
return 2
|
|
269
|
+
|
|
270
|
+
param = params[0]
|
|
271
|
+
if param.name != "input":
|
|
272
|
+
print(json.dumps({"ok": False, "error": "function parameter must be named `input`"}))
|
|
273
|
+
return 2
|
|
274
|
+
|
|
275
|
+
hints = _resolved_type_hints(fn, mod)
|
|
276
|
+
ann = hints.get(param.name, param.annotation)
|
|
277
|
+
raw = _parse_input(input_data)
|
|
278
|
+
|
|
279
|
+
try:
|
|
280
|
+
if not _is_pydantic_model(ann):
|
|
281
|
+
print(json.dumps({"ok": False, "error": "input must be a Pydantic BaseModel type annotation"}))
|
|
282
|
+
return 2
|
|
283
|
+
if not isinstance(raw, dict):
|
|
284
|
+
print(json.dumps({"ok": False, "error": "input_data must be an object mapping for the input model"}))
|
|
285
|
+
return 2
|
|
286
|
+
inp = ann.model_validate(raw) if hasattr(ann, "model_validate") else ann.parse_obj(raw)
|
|
287
|
+
result = fn(inp)
|
|
288
|
+
out = _model_to_jsonable(result)
|
|
289
|
+
try:
|
|
290
|
+
json.dumps(out)
|
|
291
|
+
except Exception:
|
|
292
|
+
out = str(out)
|
|
293
|
+
print(json.dumps({"ok": True, "data": out}, ensure_ascii=False))
|
|
294
|
+
return 0
|
|
295
|
+
except Exception as e:
|
|
296
|
+
print(json.dumps({"ok": False, "error": str(e)}, ensure_ascii=False))
|
|
297
|
+
return 1
|
|
298
|
+
|
|
299
|
+
if __name__ == "__main__":
|
|
300
|
+
sys.exit(main())
|
|
301
|
+
"""
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
_DOCS_CODE = r"""
|
|
305
|
+
import ast
|
|
306
|
+
import importlib.util
|
|
307
|
+
import inspect
|
|
308
|
+
import json
|
|
309
|
+
import sys
|
|
310
|
+
import typing
|
|
311
|
+
|
|
312
|
+
def _load_module_from_path(path):
|
|
313
|
+
spec = importlib.util.spec_from_file_location("__supypowers_target__", path)
|
|
314
|
+
mod = importlib.util.module_from_spec(spec)
|
|
315
|
+
spec.loader.exec_module(mod)
|
|
316
|
+
return mod
|
|
317
|
+
|
|
318
|
+
def _is_pydantic_model(cls):
|
|
319
|
+
try:
|
|
320
|
+
from pydantic import BaseModel
|
|
321
|
+
return isinstance(cls, type) and issubclass(cls, BaseModel)
|
|
322
|
+
except Exception:
|
|
323
|
+
return False
|
|
324
|
+
|
|
325
|
+
def _resolved_type_hints(fn, mod):
|
|
326
|
+
try:
|
|
327
|
+
return typing.get_type_hints(fn, globalns=vars(mod), localns=vars(mod))
|
|
328
|
+
except Exception:
|
|
329
|
+
return {}
|
|
330
|
+
|
|
331
|
+
def _schema_for_model(model_cls):
|
|
332
|
+
try:
|
|
333
|
+
return model_cls.model_json_schema()
|
|
334
|
+
except Exception:
|
|
335
|
+
try:
|
|
336
|
+
return model_cls.schema()
|
|
337
|
+
except Exception:
|
|
338
|
+
return None
|
|
339
|
+
|
|
340
|
+
def _has_superpower_decorator(script_path, fn_name):
|
|
341
|
+
try:
|
|
342
|
+
src = open(script_path, "r", encoding="utf-8").read()
|
|
343
|
+
tree = ast.parse(src)
|
|
344
|
+
for node in tree.body:
|
|
345
|
+
if isinstance(node, ast.FunctionDef) and node.name == fn_name:
|
|
346
|
+
for dec in node.decorator_list:
|
|
347
|
+
if isinstance(dec, ast.Name) and dec.id == "superpower":
|
|
348
|
+
return True
|
|
349
|
+
if isinstance(dec, ast.Attribute) and dec.attr == "superpower":
|
|
350
|
+
return True
|
|
351
|
+
return False
|
|
352
|
+
except Exception:
|
|
353
|
+
return False
|
|
354
|
+
|
|
355
|
+
def main():
|
|
356
|
+
payload = json.loads(sys.stdin.read())
|
|
357
|
+
script_path = payload["script_path"]
|
|
358
|
+
require_marker = bool(payload.get("require_marker"))
|
|
359
|
+
|
|
360
|
+
mod = _load_module_from_path(script_path)
|
|
361
|
+
|
|
362
|
+
fns = []
|
|
363
|
+
for name, obj in sorted(vars(mod).items()):
|
|
364
|
+
if name.startswith("_"):
|
|
365
|
+
continue
|
|
366
|
+
if not callable(obj):
|
|
367
|
+
continue
|
|
368
|
+
try:
|
|
369
|
+
sig = inspect.signature(obj)
|
|
370
|
+
except Exception:
|
|
371
|
+
continue
|
|
372
|
+
params = list(sig.parameters.values())
|
|
373
|
+
if len(params) != 1:
|
|
374
|
+
continue
|
|
375
|
+
if params[0].name != "input":
|
|
376
|
+
continue
|
|
377
|
+
hints = _resolved_type_hints(obj, mod)
|
|
378
|
+
ann_in = hints.get(params[0].name, params[0].annotation)
|
|
379
|
+
if require_marker and not _has_superpower_decorator(script_path, name):
|
|
380
|
+
continue
|
|
381
|
+
if not _is_pydantic_model(ann_in):
|
|
382
|
+
continue
|
|
383
|
+
|
|
384
|
+
ann_out = hints.get("return", sig.return_annotation)
|
|
385
|
+
fns.append({
|
|
386
|
+
"name": name,
|
|
387
|
+
"description": inspect.getdoc(obj) or "",
|
|
388
|
+
"input_schema": _schema_for_model(ann_in) if _is_pydantic_model(ann_in) else None,
|
|
389
|
+
"output_schema": _schema_for_model(ann_out) if _is_pydantic_model(ann_out) else None,
|
|
390
|
+
})
|
|
391
|
+
|
|
392
|
+
print(json.dumps({"script": script_path, "functions": fns}, ensure_ascii=False))
|
|
393
|
+
return 0
|
|
394
|
+
|
|
395
|
+
if __name__ == "__main__":
|
|
396
|
+
sys.exit(main())
|
|
397
|
+
"""
|
|
398
|
+
|
|
399
|
+
|
|
400
|
+
# Allow invoking the CLI module directly (e.g. `python cli.py`) in addition
# to the installed console-script entry point.
if __name__ == "__main__":
    app()
|
|
402
|
+
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Dict, Iterable
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def resolve_script_path(folder: Path, script_name: str) -> Path:
    """
    Resolve a script name (with or without a ``.py`` suffix) within a folder.

    Returns the resolved absolute path.
    Raises FileNotFoundError when the resolved path is not an existing file.
    """
    filename = script_name if script_name.endswith(".py") else script_name + ".py"
    candidate = (folder / filename).resolve()
    # Reject directories and dangling names alike.
    if not (candidate.exists() and candidate.is_file()):
        raise FileNotFoundError(f"Script not found: {candidate}")
    return candidate
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def _parse_dotenv(text: str) -> Dict[str, str]:
|
|
19
|
+
env: Dict[str, str] = {}
|
|
20
|
+
for line in text.splitlines():
|
|
21
|
+
line = line.strip()
|
|
22
|
+
if not line or line.startswith("#"):
|
|
23
|
+
continue
|
|
24
|
+
if line.startswith("export "):
|
|
25
|
+
line = line[len("export ") :].strip()
|
|
26
|
+
if "=" not in line:
|
|
27
|
+
continue
|
|
28
|
+
k, v = line.split("=", 1)
|
|
29
|
+
k = k.strip()
|
|
30
|
+
v = v.strip().strip('"').strip("'")
|
|
31
|
+
if k:
|
|
32
|
+
env[k] = v
|
|
33
|
+
return env
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def parse_secrets_args(secrets_args: Iterable[str]) -> Dict[str, str]:
    """
    Accept secrets as either:
    - a path to a dotenv file
    - inline KEY=VAL

    Later entries overwrite earlier ones on key collision.
    Raises ValueError for an argument that is neither an existing file nor
    contains ``=``.
    """
    merged: Dict[str, str] = {}
    for item in secrets_args:
        looks_inline = "=" in item
        # Inline wins unless the literal string happens to name an existing path.
        if looks_inline and not Path(item).exists():
            key, _, val = item.partition("=")
            merged[key] = val
            continue
        path = Path(item)
        if path.exists() and path.is_file():
            merged.update(_parse_dotenv(path.read_text(encoding="utf-8")))
        elif looks_inline:
            # Fall back: treat as inline even though a same-named path exists
            # but is not a regular file.
            key, _, val = item.partition("=")
            merged[key] = val
        else:
            raise ValueError(f"--secrets value must be a .env path or KEY=VAL, got: {item}")
    return merged
|
|
59
|
+
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
import subprocess
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Dict, Optional
|
|
9
|
+
|
|
10
|
+
from supypowers.uv_script_metadata import read_uv_script_dependencies
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@dataclass(frozen=True)
class UVRunError(Exception):
    """
    Raised when ``uv run`` exits non-zero.

    Carries the failing process's exit code plus its captured stdout/stderr
    so callers can surface a useful diagnostic.
    """

    message: str
    exit_code: int
    stdout: str
    stderr: str

    def __post_init__(self) -> None:
        # Bug fix: a frozen dataclass never calls BaseException.__init__, so
        # `args` stays empty and `str(exc)` is "" — error reporters that print
        # str(e) would show nothing. Initialize the base class explicitly.
        # (BaseException.__init__ sets `args` at the C level, so it does not
        # trip the frozen dataclass's __setattr__ guard.)
        super().__init__(self.message)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def uv_run_python_code(
    *,
    script_path: Path,
    code: str,
    payload: dict,
    extra_env: Optional[Dict[str, str]] = None,
    quiet: bool = True,
) -> str:
    """
    Execute `python -c <code>` in a uv environment built from `script_path`
    inline dependencies, sending `payload` via stdin and returning stdout.

    Raises UVRunError when the `uv run` process exits non-zero.
    """
    run_env = dict(os.environ)
    run_env.update(extra_env or {})

    # Assemble: uv run --no-project [-q --no-progress] --with <dep>... python -c <code>
    cmd = ["uv", "run", "--no-project"]
    if quiet:
        cmd += ["-q", "--no-progress"]
    for dependency in read_uv_script_dependencies(script_path):
        cmd += ["--with", dependency]
    cmd += ["python", "-c", code]

    completed = subprocess.run(
        cmd,
        input=json.dumps(payload).encode("utf-8"),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=run_env,
    )

    out_text = completed.stdout.decode("utf-8", errors="replace").strip()
    err_text = completed.stderr.decode("utf-8", errors="replace").strip()

    if completed.returncode != 0:
        raise UVRunError(
            message=f"`uv run` failed with exit code {completed.returncode}",
            exit_code=completed.returncode,
            stdout=out_text,
            stderr=err_text,
        )

    return out_text
|
|
66
|
+
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import ast
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import List
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def read_uv_script_dependencies(script_path: Path) -> List[str]:
    """
    Parse `uv` (PEP 723) inline script metadata and return its dependency strings.

    Supports the common form:

        # /// script
        # requires-python = ">=3.12"
        # dependencies = [
        #     "pydantic",
        # ]
        # ///

    Returns an empty list when the file is unreadable, the metadata block is
    absent or malformed, or no `dependencies` key is present.
    """
    try:
        text = script_path.read_text(encoding="utf-8")
    except Exception:
        return []

    meta_src = _extract_metadata_block(text)
    if not meta_src:
        return []

    # Bug fix: the metadata block is TOML, not Python. Parsing the whole block
    # with ast.parse() broke on any ordinary key (e.g. `requires-python = ">=3.12"`
    # is a Python SyntaxError) and silently returned []. Prefer the stdlib TOML
    # parser (3.11+); fall back to a lenient extractor otherwise.
    try:
        import tomllib
    except ModuleNotFoundError:
        tomllib = None
    if tomllib is not None:
        try:
            value = tomllib.loads(meta_src).get("dependencies")
        except Exception:
            # Malformed TOML: try the lenient extractor below.
            pass
        else:
            if isinstance(value, list):
                return [item for item in value if isinstance(item, str)]
            return []

    return _extract_dependencies_literal(meta_src)


def _extract_metadata_block(text: str) -> str:
    """Return the comment-stripped body of the `# /// script` block, or ''."""
    lines = text.splitlines()
    start = None
    end = None
    for i, line in enumerate(lines):
        if line.strip() == "# /// script":
            start = i + 1
            continue
        if start is not None and line.strip() == "# ///":
            end = i
            break
    if start is None or end is None or end <= start:
        return ""

    # Strip the leading "# " comment marker from each metadata line.
    meta_lines = []
    for raw in lines[start:end]:
        s = raw.lstrip()
        if s.startswith("#"):
            s = s[1:]
            if s.startswith(" "):
                s = s[1:]
        meta_lines.append(s)
    return "\n".join(meta_lines).strip()


def _extract_dependencies_literal(meta_src: str) -> List[str]:
    """
    Lenient fallback: pull out only the `dependencies = [...]` assignment and
    evaluate it as a literal, ignoring any other (non-Python) metadata lines.
    """
    lines = meta_src.splitlines()
    for idx, line in enumerate(lines):
        if not re.match(r"^\s*dependencies\s*=", line):
            continue
        # Accumulate lines until the opening '[' is balanced by ']'.
        buf = []
        depth = 0
        seen_bracket = False
        for cont in lines[idx:]:
            buf.append(cont)
            depth += cont.count("[") - cont.count("]")
            seen_bracket = seen_bracket or "[" in cont
            if seen_bracket and depth <= 0:
                break
        _, _, literal = "\n".join(buf).partition("=")
        try:
            value = ast.literal_eval(literal.strip())
        except Exception:
            return []
        if isinstance(value, list):
            return [item for item in value if isinstance(item, str)]
        return []
    return []
|
|
77
|
+
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
import os
|
|
5
|
+
import shutil
|
|
6
|
+
import subprocess
|
|
7
|
+
import unittest
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
# Repository root (this test file lives one level below it) and the bundled
# examples directory that the CLI commands are exercised against.
ROOT = Path(__file__).resolve().parents[1]
EXAMPLES = ROOT / "examples"
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _run_uv_superpowers(*args: str) -> dict:
    """
    Run `uv run supypowers <args...>` from the repo root and parse stdout as JSON.

    Skips the calling test when `uv` is missing from PATH; raises an
    AssertionError when the command exits non-zero or prints non-JSON.
    """
    if shutil.which("uv") is None:
        raise unittest.SkipTest("uv not found on PATH")

    cmd = ["uv", "run", "supypowers", *args]
    completed = subprocess.run(
        cmd,
        cwd=str(ROOT),
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        env=os.environ.copy(),
        text=True,
    )
    out = completed.stdout.strip()
    err = completed.stderr.strip()

    if completed.returncode != 0:
        raise AssertionError(
            f"command failed ({completed.returncode})\ncmd={cmd}\nstdout={out}\nstderr={err}"
        )

    try:
        return json.loads(out)
    except Exception as e:
        raise AssertionError(f"stdout was not JSON\ncmd={cmd}\nstdout={out}\nstderr={err}\nerr={e}")
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class TestCLI(unittest.TestCase):
    """End-to-end checks that drive the installed CLI through `uv run`."""

    def test_docs_includes_exponents(self) -> None:
        docs = _run_uv_superpowers(str(EXAMPLES), "docs")
        # `docs` is a list of {"script": ..., "functions": [...]} entries;
        # index them by basename for convenient lookup.
        indexed = {}
        for entry in docs:
            indexed[Path(entry["script"]).name] = entry
        self.assertIn("exponents.py", indexed)
        names = {fn["name"] for fn in indexed["exponents.py"]["functions"]}
        self.assertIn("compute_sqrt", names)
        self.assertIn("compute_different_power", names)

    def test_docs_markdown_renders(self) -> None:
        # Just ensure it runs and produces markdown-like output.
        cmd = ["uv", "run", "supypowers", str(EXAMPLES), "docs", "--format", "md"]
        completed = subprocess.run(
            cmd,
            cwd=str(ROOT),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=os.environ.copy(),
            text=True,
        )
        self.assertEqual(
            completed.returncode,
            0,
            msg=f"stderr={completed.stderr}\nstdout={completed.stdout}",
        )
        self.assertIn("## Supypowers", completed.stdout)
        self.assertIn("### `", completed.stdout)

    def test_run_exponents_compute_sqrt(self) -> None:
        result = _run_uv_superpowers(str(EXAMPLES), "run", "exponents:compute_sqrt", "{'x': 9}")
        self.assertTrue(result["ok"])
        self.assertEqual(result["data"]["result"], 3.0)

    def test_run_strings_reverse(self) -> None:
        result = _run_uv_superpowers(str(EXAMPLES), "run", "strings:reverse_string", "{'s': 'abc'}")
        self.assertTrue(result["ok"])
        self.assertEqual(result["data"]["result"], "cba")

    def test_run_dates_add_days(self) -> None:
        result = _run_uv_superpowers(
            str(EXAMPLES),
            "run",
            "dates:add_days",
            "{'d': '2025-01-01', 'days': 10}",
        )
        self.assertTrue(result["ok"])
        self.assertEqual(result["data"]["result"], "2025-01-11")

    def test_run_non_pydantic_output_allowed(self) -> None:
        result = _run_uv_superpowers(str(EXAMPLES), "run", "misc:echo", "{'message': 'hi'}")
        self.assertTrue(result["ok"])
        self.assertEqual(result["data"], "hi")
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
# Allow running this test module directly: `python tests/test_cli.py`.
if __name__ == "__main__":
    unittest.main()
|
|
95
|
+
|