langchain-anyllm 0.0.1a1 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. langchain_anyllm-0.0.1a1/.github/actions/uv_setup/action.yml +18 -0
  2. langchain_anyllm-0.0.1a1/.github/scripts/check_diff.py +48 -0
  3. langchain_anyllm-0.0.1a1/.github/scripts/get_min_versions.py +186 -0
  4. langchain_anyllm-0.0.1a1/.github/workflows/_codespell.yml +39 -0
  5. langchain_anyllm-0.0.1a1/.github/workflows/_compile_integration_test.yml +52 -0
  6. langchain_anyllm-0.0.1a1/.github/workflows/_lint.yml +42 -0
  7. langchain_anyllm-0.0.1a1/.github/workflows/_release.yml +271 -0
  8. langchain_anyllm-0.0.1a1/.github/workflows/_test.yml +54 -0
  9. langchain_anyllm-0.0.1a1/.github/workflows/_test_release.yml +106 -0
  10. langchain_anyllm-0.0.1a1/.github/workflows/check_diffs.yml +101 -0
  11. langchain_anyllm-0.0.1a1/.github/workflows/extract_ignored_words_list.py +13 -0
  12. langchain_anyllm-0.0.1a1/.gitignore +60 -0
  13. langchain_anyllm-0.0.1a1/.pre-commit-config.yaml +50 -0
  14. langchain_anyllm-0.0.1a1/LICENSE +21 -0
  15. langchain_anyllm-0.0.1a1/Makefile +82 -0
  16. langchain_anyllm-0.0.1a1/PKG-INFO +185 -0
  17. langchain_anyllm-0.0.1a1/README.md +172 -0
  18. langchain_anyllm-0.0.1a1/examples/basic_usage.py +232 -0
  19. langchain_anyllm-0.0.1a1/langchain_anyllm/__init__.py +5 -0
  20. langchain_anyllm-0.0.1a1/langchain_anyllm/chat_models.py +594 -0
  21. langchain_anyllm-0.0.1a1/langchain_anyllm/py.typed +0 -0
  22. langchain_anyllm-0.0.1a1/langchain_anyllm/utils.py +335 -0
  23. langchain_anyllm-0.0.1a1/pyproject.toml +65 -0
  24. langchain_anyllm-0.0.1a1/tests/__init__.py +1 -0
  25. langchain_anyllm-0.0.1a1/tests/integration_tests/__init__.py +1 -0
  26. langchain_anyllm-0.0.1a1/tests/integration_tests/test_compile.py +13 -0
  27. langchain_anyllm-0.0.1a1/tests/integration_tests/test_standard.py +73 -0
  28. langchain_anyllm-0.0.1a1/tests/integration_tests/test_standard_anthropic.py +70 -0
  29. langchain_anyllm-0.0.1a1/tests/unit_tests/__init__.py +0 -0
  30. langchain_anyllm-0.0.1a1/tests/unit_tests/test_chat_models.py +115 -0
  31. langchain_anyllm-0.0.1a1/tests/unit_tests/test_standard.py +32 -0
  32. langchain_anyllm-0.0.1a1/tests/unit_tests/test_utils.py +104 -0
  33. langchain_anyllm-0.0.1a1/uv.lock +1581 -0
@@ -0,0 +1,18 @@
+ # TODO: https://docs.astral.sh/uv/guides/integration/github/#caching
+
+ name: uv-install
+ description: Set up Python and uv
+
+ inputs:
+   python-version:
+     description: Python version, supporting MAJOR.MINOR only
+     required: true
+
+ runs:
+   using: composite
+   steps:
+     - name: Install uv and set the python version
+       uses: astral-sh/setup-uv@v5
+       with:
+         version: "0.5.25"
+         python-version: ${{ inputs.python-version }}
@@ -0,0 +1,48 @@
+ import json
+ import sys
+ from typing import Dict
+
+ LIB_DIRS = ["."]
+
+ if __name__ == "__main__":
+     files = sys.argv[1:]
+
+     dirs_to_run: Dict[str, set] = {
+         "lint": set(),
+         "test": set(),
+     }
+
+     if len(files) == 300:
+         # max diff length is 300 files - there are likely files missing
+         raise ValueError("Max diff reached. Please manually run CI on changed files.")
+
+     for file in files:
+         # Skip git internal files (but not .github/ directory)
+         if file.startswith(".git/"):
+             continue
+
+         # For any Python file, test file, or pyproject.toml change, run tests
+         if file.endswith((".py", ".toml")) or file.startswith(
+             ("langchain_anyllm/", "tests/", "examples/")
+         ):
+             dirs_to_run["test"].add(".")
+
+         # For workflow changes, run tests
+         if any(
+             file.startswith(dir_)
+             for dir_ in (
+                 ".github/workflows",
+                 ".github/tools",
+                 ".github/actions",
+                 ".github/scripts/check_diff.py",
+             )
+         ):
+             dirs_to_run["test"].update(LIB_DIRS)
+
+     outputs = {
+         "dirs-to-lint": list(dirs_to_run["lint"] | dirs_to_run["test"]),
+         "dirs-to-test": list(dirs_to_run["test"]),
+     }
+     for key, value in outputs.items():
+         json_output = json.dumps(value)
+         print(f"{key}={json_output}")  # noqa: T201
@@ -0,0 +1,186 @@
+ import re
+ import sys
+ import tomllib
+ from collections import defaultdict
+ from typing import Optional
+
+ import requests
+ from packaging.requirements import Requirement
+ from packaging.specifiers import SpecifierSet
+ from packaging.version import Version, parse
+
+ MIN_VERSION_LIBS = ["langchain-core"]
+
+ # some libs only get checked on release because of simultaneous changes in
+ # multiple libs
+ SKIP_IF_PULL_REQUEST = ["langchain-core"]
+
+
+ def get_pypi_versions(package_name: str) -> list[str]:
+     """
+     Fetch all available versions for a package from PyPI.
+
+     Args:
+         package_name (str): Name of the package
+
+     Returns:
+         list[str]: List of all available versions
+
+     Raises:
+         requests.exceptions.RequestException: If PyPI API request fails
+         KeyError: If package not found or response format unexpected
+     """
+     pypi_url = f"https://pypi.org/pypi/{package_name}/json"
+     response = requests.get(pypi_url)
+     response.raise_for_status()
+     return list(response.json()["releases"].keys())
+
+
+ def get_minimum_version(package_name: str, spec_string: str) -> Optional[str]:
+     """
+     Find the minimum published version that satisfies the given constraints.
+
+     Args:
+         package_name (str): Name of the package
+         spec_string (str): Version specification string
+             (e.g., ">=0.2.43,<0.4.0,!=0.3.0")
+
+     Returns:
+         Optional[str]: Minimum compatible version or None if no compatible version found
+     """
+     # rewrite occurrences of ^0.0.z to 0.0.z (can be anywhere in constraint string)
+     spec_string = re.sub(r"\^0\.0\.(\d+)", r"0.0.\1", spec_string)
+     # rewrite occurrences of ^0.y.z to >=0.y.z,<0.y+1
+     # (can be anywhere in constraint string)
+     for y in range(1, 10):
+         spec_string = re.sub(
+             rf"\^0\.{y}\.(\d+)", rf">=0.{y}.\1,<0.{y + 1}", spec_string
+         )
+     # rewrite occurrences of ^x.y.z to >=x.y.z,<x+1.0.0
+     # (can be anywhere in constraint string)
+     for x in range(1, 10):
+         spec_string = re.sub(
+             rf"\^{x}\.(\d+)\.(\d+)", rf">={x}.\1.\2,<{x + 1}", spec_string
+         )
+
+     spec_set = SpecifierSet(spec_string)
+     all_versions = get_pypi_versions(package_name)
+
+     valid_versions = []
+     for version_str in all_versions:
+         try:
+             version = parse(version_str)
+             if spec_set.contains(version):
+                 valid_versions.append(version)
+         except ValueError:
+             continue
+
+     return str(min(valid_versions)) if valid_versions else None
+
+
+ def _check_python_version_from_requirement(
+     requirement: Requirement, python_version: str
+ ) -> bool:
+     if not requirement.marker:
+         return True
+     else:
+         marker_str = str(requirement.marker)
+ if "python_version" or "python_full_version" in marker_str:
+             python_version_str = "".join(
+                 char
+                 for char in marker_str
+                 if char.isdigit() or char in (".", "<", ">", "=", ",")
+             )
+             return check_python_version(python_version, python_version_str)
+         return True
+
+
+ def get_min_version_from_toml(
+     toml_path: str,
+     versions_for: str,
+     python_version: str,
+     *,
+     include: Optional[list] = None,
+ ):
+     # Parse the TOML file
+     with open(toml_path, "rb") as file:
+         toml_data = tomllib.load(file)
+
+     dependencies = defaultdict(list)
+     for dep in toml_data["project"]["dependencies"]:
+         requirement = Requirement(dep)
+         dependencies[requirement.name].append(requirement)
+
+     # Initialize a dictionary to store the minimum versions
+     min_versions = {}
+
+     # Iterate over the libs in MIN_VERSION_LIBS
+     for lib in set(MIN_VERSION_LIBS + (include or [])):
+         if versions_for == "pull_request" and lib in SKIP_IF_PULL_REQUEST:
+             # some libs only get checked on release because of simultaneous
+             # changes in multiple libs
+             continue
+         # Check if the lib is present in the dependencies
+         if lib in dependencies:
+             if include and lib not in include:
+                 continue
+             requirements = dependencies[lib]
+             for requirement in requirements:
+                 if _check_python_version_from_requirement(requirement, python_version):
+                     version_string = str(requirement.specifier)
+                     break
+
+             # Use parse_version to get the minimum supported version from version_string
+             min_version = get_minimum_version(lib, version_string)
+
+             # Store the minimum version in the min_versions dictionary
+             min_versions[lib] = min_version
+
+     return min_versions
+
+
+ def check_python_version(version_string, constraint_string):
+     """
+     Check if the given Python version matches the given constraints.
+
+     :param version_string: A string representing the Python version (e.g. "3.8.5").
+     :param constraint_string: A string representing the package's Python version
+         constraints (e.g. ">=3.6, <4.0").
+     :return: True if the version matches the constraints, False otherwise.
+     """
+
+     # rewrite occurrences of ^0.0.z to 0.0.z (can be anywhere in constraint string)
+     constraint_string = re.sub(r"\^0\.0\.(\d+)", r"0.0.\1", constraint_string)
+     # rewrite occurrences of ^0.y.z to >=0.y.z,<0.y+1.0
+     # (can be anywhere in constraint string)
+     for y in range(1, 10):
+         constraint_string = re.sub(
+             rf"\^0\.{y}\.(\d+)", rf">=0.{y}.\1,<0.{y + 1}.0", constraint_string
+         )
+     # rewrite occurrences of ^x.y.z to >=x.y.z,<x+1.0.0
+     # (can be anywhere in constraint string)
+     for x in range(1, 10):
+         constraint_string = re.sub(
+ rf"\^{x}\.0\.(\d+)", rf">={x}.0.\1,<{x + 1}.0.0", constraint_string
+         )
+
+     try:
+         version = Version(version_string)
+         constraints = SpecifierSet(constraint_string)
+         return version in constraints
+     except Exception as e:
+         print(f"Error: {e}")
+         return False
+
+
+ if __name__ == "__main__":
+     # Get the TOML file path from the command line argument
+     toml_file = sys.argv[1]
+     versions_for = sys.argv[2]
+     python_version = sys.argv[3]
+     assert versions_for in ["release", "pull_request"]
+
+     # Call the function to get the minimum versions
+     min_versions = get_min_version_from_toml(toml_file, versions_for, python_version)
+
+     print(" ".join([f"{lib}=={version}" for lib, version in min_versions.items()]))
@@ -0,0 +1,39 @@
+ ---
+ name: make spell_check
+
+ on:
+   workflow_call:
+     inputs:
+       working-directory:
+         required: true
+         type: string
+         description: "From which folder this pipeline executes"
+
+ permissions:
+   contents: read
+
+ jobs:
+   codespell:
+     name: (Check for spelling errors)
+     runs-on: ubuntu-latest
+
+     steps:
+       - name: Checkout
+         uses: actions/checkout@v4
+
+       - name: Install Dependencies
+         run: |
+           pip install toml
+
+       - name: Extract Ignore Words List
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           # Use a Python script to extract the ignore words list from pyproject.toml
+           python ../../.github/workflows/extract_ignored_words_list.py
+         id: extract_ignore_words
+
+       - name: Codespell
+         uses: codespell-project/actions-codespell@v2
+         with:
+           skip: guide_imports.json
+           ignore_words_list: ${{ steps.extract_ignore_words.outputs.ignore_words_list }}
@@ -0,0 +1,52 @@
+ name: compile-integration-test
+
+ on:
+   workflow_call:
+     inputs:
+       working-directory:
+         required: true
+         type: string
+         description: "From which folder this pipeline executes"
+
+ env:
+   UV_FROZEN: "true"
+
+ jobs:
+   build:
+     defaults:
+       run:
+         working-directory: ${{ inputs.working-directory }}
+     runs-on: ubuntu-latest
+     strategy:
+       matrix:
+         python-version:
+           - "3.11"
+           - "3.13"
+     name: "uv run pytest -m compile tests/integration_tests"
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Set up Python ${{ matrix.python-version }} + uv
+         uses: "./.github/actions/uv_setup"
+         with:
+           python-version: ${{ matrix.python-version }}
+
+       - name: Install integration dependencies
+         shell: bash
+         run: uv sync --group test
+
+       - name: Check integration tests compile
+         shell: bash
+         run: uv run pytest -m compile tests/integration_tests
+
+       - name: Ensure the tests did not create any additional files
+         shell: bash
+         run: |
+           set -eu
+
+           STATUS="$(git status)"
+           echo "$STATUS"
+
+           # grep will exit non-zero if the target message isn't found,
+           # and `set -e` above will cause the step to fail.
+           echo "$STATUS" | grep 'nothing to commit, working tree clean'
@@ -0,0 +1,42 @@
+ name: Lint
+
+ on:
+   push:
+     branches: [main]
+   pull_request:
+   workflow_dispatch:
+
+ jobs:
+   run-linter:
+     timeout-minutes: 30
+     runs-on: ubuntu-latest
+
+     steps:
+       - name: Check out the repository
+         uses: actions/checkout@v5
+
+       - name: Install the latest version of uv and set the python version to 3.13
+         uses: astral-sh/setup-uv@v7
+         with:
+           python-version: 3.13
+           activate-environment: true
+
+       - name: Install dependencies
+         run: uv sync --group dev --group lint
+
+       - uses: actions/cache@v4
+         with:
+           path: .mypy_cache
+           key: ${{ runner.os }}-mypy-${{ hashFiles('pyproject.toml') }}
+           restore-keys: |
+             ${{ runner.os }}-mypy-
+
+       - uses: actions/cache@v4
+         with:
+           path: ~/.cache/pre-commit
+           key: ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+           restore-keys: |
+             ${{ runner.os }}-pre-commit-
+
+       - name: pre-commit
+         run: uv run pre-commit run --all-files --verbose
@@ -0,0 +1,271 @@
+ name: release
+ run-name: Release ${{ inputs.working-directory }} by @${{ github.actor }}
+ on:
+   workflow_call:
+     inputs:
+       working-directory:
+         required: true
+         type: string
+         description: "From which folder this pipeline executes"
+   workflow_dispatch:
+     inputs:
+       working-directory:
+         required: true
+         type: string
+         default: '.'
+       dangerous-nonmaster-release:
+         required: false
+         type: boolean
+         default: false
+         description: "Release from a non-master branch (danger!)"
+
+ env:
+   PYTHON_VERSION: "3.11"
+   UV_FROZEN: "true"
+   UV_NO_SYNC: "true"
+
+ jobs:
+   build:
+     if: github.ref == 'refs/heads/main' || inputs.dangerous-nonmaster-release
+     runs-on: ubuntu-latest
+
+     outputs:
+       pkg-name: ${{ steps.check-version.outputs.pkg-name }}
+       version: ${{ steps.check-version.outputs.version }}
+
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Set up Python + uv
+         uses: "./.github/actions/uv_setup"
+         with:
+           python-version: ${{ env.PYTHON_VERSION }}
+
+       # We want to keep this build stage *separate* from the release stage,
+       # so that there's no sharing of permissions between them.
+       # The release stage has trusted publishing and GitHub repo contents write access,
+       # and we want to keep the scope of that access limited just to the release job.
+       # Otherwise, a malicious `build` step (e.g. via a compromised dependency)
+       # could get access to our GitHub or PyPI credentials.
+       #
+       # Per the trusted publishing GitHub Action:
+       # > It is strongly advised to separate jobs for building [...]
+       # > from the publish job.
+       # https://github.com/pypa/gh-action-pypi-publish#non-goals
+       - name: Build project for distribution
+         run: uv build
+         working-directory: ${{ inputs.working-directory }}
+
+       - name: Upload build
+         uses: actions/upload-artifact@v4
+         with:
+           name: dist
+           path: ${{ inputs.working-directory }}/dist/
+
+       - name: Check Version
+         id: check-version
+         shell: python
+         working-directory: ${{ inputs.working-directory }}
+         run: |
+           import os
+           import tomllib
+           with open("pyproject.toml", "rb") as f:
+               data = tomllib.load(f)
+           pkg_name = data["project"]["name"]
+           version = data["project"]["version"]
+           with open(os.environ["GITHUB_OUTPUT"], "a") as f:
+               f.write(f"pkg-name={pkg_name}\n")
+               f.write(f"version={version}\n")
+
+   test-pypi-publish:
+     needs:
+       - build
+     uses:
+       ./.github/workflows/_test_release.yml
+     permissions: write-all
+     with:
+       working-directory: ${{ inputs.working-directory }}
+       dangerous-nonmaster-release: ${{ inputs.dangerous-nonmaster-release }}
+     secrets: inherit
+
+   pre-release-checks:
+     needs:
+       - build
+       - test-pypi-publish
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v4
+
+       # We explicitly *don't* set up caching here. This ensures our tests are
+       # maximally sensitive to catching breakage.
+       #
+       # For example, here's a way that caching can cause a falsely-passing test:
+       # - Make the langchain package manifest no longer list a dependency package
+       #   as a requirement. This means it won't be installed by `pip install`,
+       #   and attempting to use it would cause a crash.
+       # - That dependency used to be required, so it may have been cached.
+       #   When restoring the venv packages from cache, that dependency gets included.
+       # - Tests pass, because the dependency is present even though it wasn't specified.
+       # - The package is published, and it breaks on the missing dependency when
+       #   used in the real world.
+
+       - name: Set up Python + uv
+         uses: "./.github/actions/uv_setup"
+         id: setup-python
+         with:
+           python-version: ${{ env.PYTHON_VERSION }}
+
+       - uses: actions/download-artifact@v4
+         with:
+           name: dist
+           path: ${{ inputs.working-directory }}/dist/
+
+       - name: Import dist package
+         shell: bash
+         working-directory: ${{ inputs.working-directory }}
+         env:
+           PKG_NAME: ${{ needs.build.outputs.pkg-name }}
+           VERSION: ${{ needs.build.outputs.version }}
+         # Here we use:
+         # - The default regular PyPI index as the *primary* index, meaning
+         #   that it takes priority (https://pypi.org/simple)
+         # - The test PyPI index as an extra index, so that any dependencies that
+         #   are not found on test PyPI can be resolved and installed anyway.
+         #   (https://test.pypi.org/simple). This will include the PKG_NAME==VERSION
+         #   package because VERSION will not have been uploaded to regular PyPI yet.
+         # - attempt install again after 5 seconds if it fails because there is
+         #   sometimes a delay in availability on test pypi
+         run: |
+           uv venv
+           VIRTUAL_ENV=.venv uv pip install dist/*.whl
+
+           # Replace all dashes in the package name with underscores,
+           # since that's how Python imports packages with dashes in the name.
+           IMPORT_NAME="$(echo "$PKG_NAME" | sed s/-/_/g)"
+
+           uv run python -c "import $IMPORT_NAME; print(dir($IMPORT_NAME))"
+
+       - name: Import test dependencies
+         run: uv sync --group test
+         working-directory: ${{ inputs.working-directory }}
+
+       # Overwrite the local version of the package with the built version
+       - name: Import published package (again)
+         working-directory: ${{ inputs.working-directory }}
+         shell: bash
+         env:
+           PKG_NAME: ${{ needs.build.outputs.pkg-name }}
+           VERSION: ${{ needs.build.outputs.version }}
+         run: |
+           VIRTUAL_ENV=.venv uv pip install dist/*.whl
+
+       - name: Run unit tests
+         run: make tests
+         working-directory: ${{ inputs.working-directory }}
+
+       - name: Run integration tests
+         env:
+           ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
+           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
+         run: |
+           uv sync --group test
+           make integration_tests
+         working-directory: ${{ inputs.working-directory }}
+
+       - name: Get minimum versions
+         working-directory: ${{ inputs.working-directory }}
+         id: min-version
+         run: |
+           VIRTUAL_ENV=.venv uv pip install packaging requests
+           python_version="$(uv run python --version | awk '{print $2}')"
+           min_versions="$(uv run python $GITHUB_WORKSPACE/.github/scripts/get_min_versions.py pyproject.toml release $python_version)"
+           echo "min-versions=$min_versions" >> "$GITHUB_OUTPUT"
+           echo "min-versions=$min_versions"
+
+       - name: Run unit tests with minimum dependency versions
+         if: ${{ steps.min-version.outputs.min-versions != '' }}
+         env:
+           MIN_VERSIONS: ${{ steps.min-version.outputs.min-versions }}
+         run: |
+           VIRTUAL_ENV=.venv uv pip install $MIN_VERSIONS
+           make tests
+         working-directory: ${{ inputs.working-directory }}
+
+   publish:
+     needs:
+       - build
+       - test-pypi-publish
+       - pre-release-checks
+     runs-on: ubuntu-latest
+     permissions:
+       # This permission is used for trusted publishing:
+       # https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/
+       #
+       # Trusted publishing has to also be configured on PyPI for each package:
+       # https://docs.pypi.org/trusted-publishers/adding-a-publisher/
+       id-token: write
+
+     defaults:
+       run:
+         working-directory: ${{ inputs.working-directory }}
+
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Set up Python + uv
+         uses: "./.github/actions/uv_setup"
+         with:
+           python-version: ${{ env.PYTHON_VERSION }}
+
+       - uses: actions/download-artifact@v4
+         with:
+           name: dist
+           path: ${{ inputs.working-directory }}/dist/
+
+       - name: Publish package distributions to PyPI
+         uses: pypa/gh-action-pypi-publish@release/v1
+         with:
+           packages-dir: ${{ inputs.working-directory }}/dist/
+           verbose: true
+           print-hash: true
+           # Temp workaround since attestations are on by default as of gh-action-pypi-publish v1.11.0
+           attestations: false
+
+   mark-release:
+     needs:
+       - build
+       - test-pypi-publish
+       - pre-release-checks
+       - publish
+     runs-on: ubuntu-latest
+     permissions:
+       # This permission is needed by `ncipollo/release-action` to
+       # create the GitHub release.
+       contents: write
+
+     defaults:
+       run:
+         working-directory: ${{ inputs.working-directory }}
+
+     steps:
+       - uses: actions/checkout@v4
+
+       - name: Set up Python + uv
+         uses: "./.github/actions/uv_setup"
+         with:
+           python-version: ${{ env.PYTHON_VERSION }}
+
+       - uses: actions/download-artifact@v4
+         with:
+           name: dist
+           path: ${{ inputs.working-directory }}/dist/
+
+       - name: Create Release
+         uses: ncipollo/release-action@v1
+         with:
+           artifacts: "dist/*"
+           token: ${{ secrets.GITHUB_TOKEN }}
+           draft: false
+           generateReleaseNotes: true
+           tag: ${{ inputs.working-directory }}/v${{ needs.build.outputs.version }}
+           commit: ${{ github.sha }}
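
As a footnote to the "Import dist package" step above: the shell one-liner there derives the importable module name from the distribution name. A hedged Python equivalent (illustration only):

pkg_name = "langchain-anyllm"  # distribution name emitted by the build job
import_name = pkg_name.replace("-", "_")  # dashes become underscores for Python imports
print(import_name)  # -> langchain_anyllm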