pyrig-2.2.6-py3-none-any.whl
This diff shows the contents of a publicly released package version as published to its public registry, and is provided for informational purposes only.
- pyrig/__init__.py +1 -0
- pyrig/dev/__init__.py +6 -0
- pyrig/dev/builders/__init__.py +1 -0
- pyrig/dev/builders/base/__init__.py +5 -0
- pyrig/dev/builders/base/base.py +256 -0
- pyrig/dev/builders/pyinstaller.py +229 -0
- pyrig/dev/cli/__init__.py +5 -0
- pyrig/dev/cli/cli.py +95 -0
- pyrig/dev/cli/commands/__init__.py +1 -0
- pyrig/dev/cli/commands/build_artifacts.py +16 -0
- pyrig/dev/cli/commands/create_root.py +25 -0
- pyrig/dev/cli/commands/create_tests.py +244 -0
- pyrig/dev/cli/commands/init_project.py +160 -0
- pyrig/dev/cli/commands/make_inits.py +27 -0
- pyrig/dev/cli/commands/protect_repo.py +145 -0
- pyrig/dev/cli/shared_subcommands.py +20 -0
- pyrig/dev/cli/subcommands.py +73 -0
- pyrig/dev/configs/__init__.py +1 -0
- pyrig/dev/configs/base/__init__.py +5 -0
- pyrig/dev/configs/base/base.py +826 -0
- pyrig/dev/configs/containers/__init__.py +1 -0
- pyrig/dev/configs/containers/container_file.py +111 -0
- pyrig/dev/configs/dot_env.py +95 -0
- pyrig/dev/configs/dot_python_version.py +88 -0
- pyrig/dev/configs/git/__init__.py +5 -0
- pyrig/dev/configs/git/gitignore.py +181 -0
- pyrig/dev/configs/git/pre_commit.py +170 -0
- pyrig/dev/configs/licence.py +112 -0
- pyrig/dev/configs/markdown/__init__.py +1 -0
- pyrig/dev/configs/markdown/docs/__init__.py +1 -0
- pyrig/dev/configs/markdown/docs/index.py +38 -0
- pyrig/dev/configs/markdown/readme.py +132 -0
- pyrig/dev/configs/py_typed.py +28 -0
- pyrig/dev/configs/pyproject.py +436 -0
- pyrig/dev/configs/python/__init__.py +5 -0
- pyrig/dev/configs/python/builders_init.py +27 -0
- pyrig/dev/configs/python/configs_init.py +28 -0
- pyrig/dev/configs/python/dot_experiment.py +46 -0
- pyrig/dev/configs/python/main.py +59 -0
- pyrig/dev/configs/python/resources_init.py +27 -0
- pyrig/dev/configs/python/shared_subcommands.py +29 -0
- pyrig/dev/configs/python/src_init.py +27 -0
- pyrig/dev/configs/python/subcommands.py +27 -0
- pyrig/dev/configs/testing/__init__.py +5 -0
- pyrig/dev/configs/testing/conftest.py +64 -0
- pyrig/dev/configs/testing/fixtures_init.py +27 -0
- pyrig/dev/configs/testing/main_test.py +74 -0
- pyrig/dev/configs/testing/zero_test.py +43 -0
- pyrig/dev/configs/workflows/__init__.py +5 -0
- pyrig/dev/configs/workflows/base/__init__.py +5 -0
- pyrig/dev/configs/workflows/base/base.py +1662 -0
- pyrig/dev/configs/workflows/build.py +106 -0
- pyrig/dev/configs/workflows/health_check.py +133 -0
- pyrig/dev/configs/workflows/publish.py +68 -0
- pyrig/dev/configs/workflows/release.py +90 -0
- pyrig/dev/tests/__init__.py +5 -0
- pyrig/dev/tests/conftest.py +40 -0
- pyrig/dev/tests/fixtures/__init__.py +1 -0
- pyrig/dev/tests/fixtures/assertions.py +147 -0
- pyrig/dev/tests/fixtures/autouse/__init__.py +5 -0
- pyrig/dev/tests/fixtures/autouse/class_.py +42 -0
- pyrig/dev/tests/fixtures/autouse/module.py +40 -0
- pyrig/dev/tests/fixtures/autouse/session.py +589 -0
- pyrig/dev/tests/fixtures/factories.py +118 -0
- pyrig/dev/utils/__init__.py +1 -0
- pyrig/dev/utils/cli.py +17 -0
- pyrig/dev/utils/git.py +312 -0
- pyrig/dev/utils/packages.py +93 -0
- pyrig/dev/utils/resources.py +77 -0
- pyrig/dev/utils/testing.py +66 -0
- pyrig/dev/utils/versions.py +268 -0
- pyrig/main.py +9 -0
- pyrig/py.typed +0 -0
- pyrig/resources/GITIGNORE +216 -0
- pyrig/resources/LATEST_PYTHON_VERSION +1 -0
- pyrig/resources/MIT_LICENSE_TEMPLATE +21 -0
- pyrig/resources/__init__.py +1 -0
- pyrig/src/__init__.py +1 -0
- pyrig/src/git/__init__.py +6 -0
- pyrig/src/git/git.py +146 -0
- pyrig/src/graph.py +255 -0
- pyrig/src/iterate.py +107 -0
- pyrig/src/modules/__init__.py +22 -0
- pyrig/src/modules/class_.py +369 -0
- pyrig/src/modules/function.py +189 -0
- pyrig/src/modules/inspection.py +148 -0
- pyrig/src/modules/module.py +658 -0
- pyrig/src/modules/package.py +452 -0
- pyrig/src/os/__init__.py +6 -0
- pyrig/src/os/os.py +121 -0
- pyrig/src/project/__init__.py +5 -0
- pyrig/src/project/mgt.py +83 -0
- pyrig/src/resource.py +58 -0
- pyrig/src/string.py +100 -0
- pyrig/src/testing/__init__.py +6 -0
- pyrig/src/testing/assertions.py +66 -0
- pyrig/src/testing/convention.py +203 -0
- pyrig-2.2.6.dist-info/METADATA +174 -0
- pyrig-2.2.6.dist-info/RECORD +102 -0
- pyrig-2.2.6.dist-info/WHEEL +4 -0
- pyrig-2.2.6.dist-info/entry_points.txt +3 -0
- pyrig-2.2.6.dist-info/licenses/LICENSE +21 -0
pyrig/dev/utils/cli.py
ADDED
@@ -0,0 +1,17 @@
"""CLI utilities."""

import sys
from pathlib import Path

from pyrig.src.modules.package import get_pkg_name_from_project_name


def get_project_name_from_argv() -> str:
    """Get the project name."""
    return Path(sys.argv[0]).name


def get_pkg_name_from_argv() -> str:
    """Get the project and package name."""
    project_name = get_project_name_from_argv()
    return get_pkg_name_from_project_name(project_name)
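For orientation, a minimal usage sketch of these helpers (the entry point name is hypothetical; the exact normalization performed by get_pkg_name_from_project_name lives in pyrig.src.modules.package and is not shown in this diff):

# sketch: what the argv-based helpers see when a console script runs this process
import sys
from pathlib import Path

from pyrig.dev.utils.cli import get_pkg_name_from_argv, get_project_name_from_argv

if __name__ == "__main__":
    print("argv[0]:", Path(sys.argv[0]).name)       # the invoked executable name, e.g. "pyrig"
    print("project:", get_project_name_from_argv())  # same value, via the helper
    print("package:", get_pkg_name_from_argv())      # derived via get_pkg_name_from_project_name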
pyrig/dev/utils/git.py
ADDED
@@ -0,0 +1,312 @@
"""GitHub repository API utilities and ruleset management.

This module provides low-level utilities for interacting with the GitHub API,
specifically for repository rulesets. It uses the PyGithub library for
authentication and API calls.

Rulesets are GitHub's newer mechanism for branch protection, offering more
flexibility than the older branch protection rules. This module provides
functions to create, update, and query rulesets.

Attributes:
    DEFAULT_BRANCH: The default branch name used by pyrig ("main").
    DEFAULT_RULESET_NAME: The name of the default protection ruleset.

Example:
    >>> from pyrig.src.git.github.repo.repo import get_repo, create_or_update_ruleset
    >>> repo = get_repo(token, "owner", "repo_name")
    >>> rules = get_rules_payload(pull_request={"required_approving_review_count": 1})
"""

import logging
import os
from pathlib import Path
from typing import Any, Literal

from dotenv import dotenv_values
from github import Github
from github.Auth import Token
from github.Repository import Repository

logger = logging.getLogger(__name__)

DEFAULT_BRANCH = "main"

DEFAULT_RULESET_NAME = f"{DEFAULT_BRANCH} protection"


def get_rules_payload(  # noqa: PLR0913
    *,
    creation: dict[str, Any] | None = None,
    update: dict[str, Any] | None = None,
    deletion: dict[str, Any] | None = None,
    required_linear_history: dict[str, Any] | None = None,
    merge_queue: dict[str, Any] | None = None,
    required_deployments: dict[str, Any] | None = None,
    required_signatures: dict[str, Any] | None = None,
    pull_request: dict[str, Any] | None = None,
    required_status_checks: dict[str, Any] | None = None,
    non_fast_forward: dict[str, Any] | None = None,
    commit_message_pattern: dict[str, Any] | None = None,
    commit_author_email_pattern: dict[str, Any] | None = None,
    committer_email_pattern: dict[str, Any] | None = None,
    branch_name_pattern: dict[str, Any] | None = None,
    tag_name_pattern: dict[str, Any] | None = None,
    file_path_restriction: dict[str, Any] | None = None,
    max_file_path_length: dict[str, Any] | None = None,
    file_extension_restriction: dict[str, Any] | None = None,
    max_file_size: dict[str, Any] | None = None,
    workflows: dict[str, Any] | None = None,
    code_scanning: dict[str, Any] | None = None,
    copilot_code_review: dict[str, Any] | None = None,
) -> list[dict[str, Any]]:
    """Build a rules array for a GitHub ruleset.

    Args:
        creation: Only allow users with bypass permission to create matching
            refs.
        update: Only allow users with bypass permission to update matching
            refs.
        deletion: Only allow users with bypass permissions to delete matching
            refs.
        required_linear_history: Prevent merge commits from being pushed to
            matching refs.
        merge_queue: Merges must be performed via a merge queue.
        required_deployments: Choose which environments must be successfully
            deployed to before refs can be pushed.
        required_signatures: Commits pushed to matching refs must have verified
            signatures.
        pull_request: Require all commits be made to a non-target branch and
            submitted via a pull request.
        required_status_checks: Choose which status checks must pass before the
            ref is updated.
        non_fast_forward: Prevent users with push access from force pushing to
            refs.
        commit_message_pattern: Parameters to be used for the
            commit_message_pattern rule.
        commit_author_email_pattern: Parameters to be used for the
            commit_author_email_pattern rule.
        committer_email_pattern: Parameters to be used for the
            committer_email_pattern rule.
        branch_name_pattern: Parameters to be used for the branch_name_pattern
            rule.
        tag_name_pattern: Parameters to be used for the tag_name_pattern rule.
        file_path_restriction: Prevent commits that include changes in
            specified file and folder paths.
        max_file_path_length: Prevent commits that include file paths that
            exceed the specified character limit.
        file_extension_restriction: Prevent commits that include files with
            specified file extensions.
        max_file_size: Prevent commits with individual files that exceed the
            specified limit.
        workflows: Require all changes made to a targeted branch to pass the
            specified workflows.
        code_scanning: Choose which tools must provide code scanning results
            before the reference is updated.
        copilot_code_review: Request Copilot code review for new pull requests
            automatically.

    Returns:
        A list of rule objects to be used in a GitHub ruleset.
    """
    rules: list[dict[str, Any]] = []

    rule_map = {
        "creation": creation,
        "update": update,
        "deletion": deletion,
        "required_linear_history": required_linear_history,
        "merge_queue": merge_queue,
        "required_deployments": required_deployments,
        "required_signatures": required_signatures,
        "pull_request": pull_request,
        "required_status_checks": required_status_checks,
        "non_fast_forward": non_fast_forward,
        "commit_message_pattern": commit_message_pattern,
        "commit_author_email_pattern": commit_author_email_pattern,
        "committer_email_pattern": committer_email_pattern,
        "branch_name_pattern": branch_name_pattern,
        "tag_name_pattern": tag_name_pattern,
        "file_path_restriction": file_path_restriction,
        "max_file_path_length": max_file_path_length,
        "file_extension_restriction": file_extension_restriction,
        "max_file_size": max_file_size,
        "workflows": workflows,
        "code_scanning": code_scanning,
        "copilot_code_review": copilot_code_review,
    }

    for rule_type, rule_config in rule_map.items():
        if rule_config is not None:
            rule_obj: dict[str, Any] = {"type": rule_type}
            if rule_config:  # If there are parameters
                rule_obj["parameters"] = rule_config
            rules.append(rule_obj)

    return rules


def create_or_update_ruleset(  # noqa: PLR0913
    token: str,
    owner: str,
    repo_name: str,
    *,
    ruleset_name: str,
    enforcement: Literal["active", "disabled", "evaluate"] = "active",
    target: Literal["branch", "tag", "push"] = "branch",
    bypass_actors: list[dict[str, Any]] | None = None,
    conditions: dict[
        Literal["ref_name"], dict[Literal["include", "exclude"], list[str]]
    ]
    | None = None,
    rules: list[dict[str, Any]] | None = None,
) -> Any:
    """Create or update a repository ruleset.

    If a ruleset with the given name exists, it is updated. Otherwise,
    a new ruleset is created.

    Args:
        token: GitHub API token with repo permissions.
        owner: Repository owner (user or organization).
        repo_name: Repository name.
        ruleset_name: Name for the ruleset.
        enforcement: Enforcement level ("active", "disabled", or "evaluate").
        target: What the ruleset applies to ("branch", "tag", or "push").
        bypass_actors: List of actors who can bypass the ruleset.
        conditions: Branch/tag name patterns to include or exclude.
        rules: List of rule objects from `get_rules_payload()`.

    Returns:
        The API response containing the created/updated ruleset.
    """
    repo = get_repo(token, owner, repo_name)
    ruleset_id = ruleset_exists(
        token=token, owner=owner, repo_name=repo_name, ruleset_name=ruleset_name
    )
    method = "PUT" if ruleset_id else "POST"
    url = f"{repo.url}/rulesets"

    if ruleset_id:
        url += f"/{ruleset_id}"

    payload: dict[str, Any] = {
        "name": ruleset_name,
        "enforcement": enforcement,
        "target": target,
        "conditions": conditions,
        "rules": rules,
    }
    if bypass_actors:
        payload["bypass_actors"] = bypass_actors

    _headers, res = repo._requester.requestJsonAndCheck(  # noqa: SLF001
        method,
        url,
        headers={
            "Accept": "application/vnd.github+json",
            "X-GitHub-Api-Version": "2022-11-28",
        },
        input=payload,
    )

    return res


def get_all_rulesets(token: str, owner: str, repo_name: str) -> Any:
    """Retrieve all rulesets defined for a repository.

    Args:
        token: GitHub API token.
        owner: Repository owner.
        repo_name: Repository name.

    Returns:
        A list of ruleset objects from the GitHub API.
    """
    repo = get_repo(token, owner, repo_name)
    url = f"{repo.url}/rulesets"
    method = "GET"
    _headers, res = repo._requester.requestJsonAndCheck(  # noqa: SLF001
        method,
        url,
        headers={
            "Accept": "application/vnd.github+json",
            "X-GitHub-Api-Version": "2022-11-28",
        },
    )
    return res


def get_repo(token: str, owner: str, repo_name: str) -> Repository:
    """Get a PyGithub Repository object for API operations.

    Args:
        token: GitHub API token.
        owner: Repository owner (user or organization).
        repo_name: Repository name.

    Returns:
        A PyGithub Repository object.
    """
    auth = Token(token)
    github = Github(auth=auth)
    return github.get_repo(f"{owner}/{repo_name}")


def ruleset_exists(token: str, owner: str, repo_name: str, ruleset_name: str) -> int:
    """Check if a ruleset with the given name exists.

    Args:
        token: GitHub API token.
        owner: Repository owner.
        repo_name: Repository name.
        ruleset_name: Name of the ruleset to check for.

    Returns:
        The ruleset ID if it exists, 0 otherwise.
    """
    rulesets = get_all_rulesets(token, owner, repo_name)
    main_ruleset = next((rs for rs in rulesets if rs["name"] == ruleset_name), None)
    return main_ruleset["id"] if main_ruleset else 0


def get_github_repo_token() -> str:
    """Retrieve the GitHub repository token for API authentication.

    Attempts to find a GitHub token in the following order:
    1. The `REPO_TOKEN` environment variable
    2. The `REPO_TOKEN` key in the project's `.env` file

    This priority order ensures CI/CD environments (which typically set
    environment variables) work seamlessly while allowing local development
    to use .env files.

    Returns:
        The GitHub token string.

    Raises:
        ValueError: If no token is found in either location, or if the
            .env file doesn't exist when falling back to it.

    Note:
        The token should have appropriate permissions for the intended
        operations (e.g., repo scope for branch protection rules).
    """
    # try os env first
    token = os.getenv("REPO_TOKEN")
    if token:
        return token

    # try .env next
    dotenv_path = Path(".env")
    if not dotenv_path.exists():
        msg = f"Expected {dotenv_path} to exist"
        raise ValueError(msg)
    dotenv = dotenv_values(dotenv_path)
    token = dotenv.get("REPO_TOKEN")
    if token:
        return token

    msg = f"Expected REPO_TOKEN in {dotenv_path}"
    raise ValueError(msg)
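A hedged usage sketch tying these helpers together (the owner and repository names are placeholders; the ref-name condition follows GitHub's ruleset API format, and an empty rule dict produces a parameterless rule as shown in get_rules_payload above):

# sketch: enforce a protection ruleset on the default branch of a hypothetical repo
from pyrig.dev.utils.git import (
    DEFAULT_BRANCH,
    DEFAULT_RULESET_NAME,
    create_or_update_ruleset,
    get_github_repo_token,
    get_rules_payload,
)

token = get_github_repo_token()  # REPO_TOKEN from the environment or .env
rules = get_rules_payload(
    deletion={},         # no parameters -> {"type": "deletion"}
    non_fast_forward={},
    pull_request={"required_approving_review_count": 1},
)
create_or_update_ruleset(
    token,
    "example-owner",     # placeholder owner
    "example-repo",      # placeholder repository
    ruleset_name=DEFAULT_RULESET_NAME,
    conditions={"ref_name": {"include": [f"refs/heads/{DEFAULT_BRANCH}"], "exclude": []}},
    rules=rules,
)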
pyrig/dev/utils/packages.py
ADDED
@@ -0,0 +1,93 @@
"""Helper functions for working with Python packages."""

from collections.abc import Iterable
from importlib import import_module
from pathlib import Path
from types import ModuleType

from setuptools import find_namespace_packages as _find_namespace_packages
from setuptools import find_packages as _find_packages

from pyrig.src.modules.module import to_path
from pyrig.src.modules.package import DOCS_DIR_NAME
from pyrig.src.testing.convention import TESTS_PACKAGE_NAME


def find_packages(
    *,
    depth: int | None = None,
    include_namespace_packages: bool = False,
    where: str = ".",
    exclude: Iterable[str] | None = None,
    include: Iterable[str] = ("*",),
) -> list[str]:
    """Discover Python packages in the specified directory.

    Finds all Python packages in the given directory, with options to filter
    by depth, include/exclude patterns, and namespace packages. This is a wrapper
    around setuptools' find_packages and find_namespace_packages functions with
    additional filtering capabilities.

    Args:
        depth: Optional maximum depth of package nesting to include (None for unlimited)
        include_namespace_packages: Whether to include namespace packages
        where: Directory to search for packages (default: current directory)
        exclude: Patterns of package names to exclude
        include: Patterns of package names to include

    Returns:
        A list of package names as strings

    Example:
        find_packages(depth=1) might return ["package1", "package2"]

    """
    gitignore_path = Path(".gitignore")
    if exclude is None:
        exclude = (
            gitignore_path.read_text(encoding="utf-8").splitlines()
            if gitignore_path.exists()
            else []
        )
    exclude = [
        p.replace("/", ".").removesuffix(".") for p in exclude if p.endswith("/")
    ]
    if include_namespace_packages:
        package_names = _find_namespace_packages(
            where=where, exclude=exclude, include=include
        )
    else:
        package_names = _find_packages(where=where, exclude=exclude, include=include)

    # Convert to list of strings explicitly
    package_names_list: list[str] = list(map(str, package_names))

    if depth is not None:
        package_names_list = [p for p in package_names_list if p.count(".") <= depth]

    return package_names_list


def get_src_package() -> ModuleType:
    """Identify and return the main source package of the project.

    Discovers the main source package by finding all top-level packages
    and filtering out the test package. This is useful for automatically
    determining the package that contains the actual implementation code.

    Returns:
        The main source package as a module object

    Raises:
        StopIteration: If no source package can be found or
            if only the test package exists

    """
    package_names = find_packages(depth=0, include_namespace_packages=False)
    package_paths = [to_path(p, is_package=True) for p in package_names]
    pkg = next(
        p for p in package_paths if p.name not in {TESTS_PACKAGE_NAME, DOCS_DIR_NAME}
    )
    pkg_name = pkg.name

    return import_module(pkg_name)
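A brief usage sketch (assumes it is run from a project root whose layout includes, say, a mypkg/ source package and a tests/ package; those names are illustrative):

# sketch: discover packages and the main source package of the current project
from pyrig.dev.utils.packages import find_packages, get_src_package

top_level = find_packages(depth=0)   # top-level packages only, e.g. ["mypkg", "tests"]
every_pkg = find_packages()          # includes nested packages such as "mypkg.sub"
src = get_src_package()              # imported module object for the non-tests package
print(top_level, src.__name__)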
pyrig/dev/utils/resources.py
ADDED
@@ -0,0 +1,77 @@
"""Decorators for various purposes.

This module provides decorators for various purposes, including:
- Retry and Exponential Handling
"""

from collections.abc import Callable
from functools import wraps
from typing import Any, ParamSpec

from requests import RequestException
from tenacity import retry, retry_if_exception_type, stop_after_attempt

import pyrig
from pyrig import resources
from pyrig.src.git.git import git_add_file
from pyrig.src.modules.package import get_pkg_name_from_cwd
from pyrig.src.resource import get_resource_path

P = ParamSpec("P")


def return_resource_file_content_on_exceptions(
    resource_name: str,
    exceptions: tuple[type[Exception], ...],
    *,
    overwrite_resource: bool = True,
    **tenacity_kwargs: Any,
) -> Callable[[Callable[P, str]], Callable[P, str]]:
    """Return content of a resource file if func raises specific exceptions.

    post_process: Optional function that takes the result and returns a new value.
    overwrite_resource: If True, write the result to the resource file.
    """
    resource_path = get_resource_path(resource_name, resources)
    content = resource_path.read_text(encoding="utf-8").strip()

    def decorator(func: Callable[P, str]) -> Callable[P, str]:
        tenacity_decorator = retry(
            retry=retry_if_exception_type(exception_types=exceptions),
            stop=stop_after_attempt(
                max_attempt_number=1
            ),  # no retries, just catch once
            retry_error_callback=lambda _state: content,
            reraise=False,
            **tenacity_kwargs,
        )

        # Apply tenacity decorator to the function once
        decorated_func = tenacity_decorator(func)

        @wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> str:
            result = decorated_func(*args, **kwargs).strip()
            if (
                get_pkg_name_from_cwd() == pyrig.__name__
                and overwrite_resource
                and result != content
            ):
                resource_path.write_text(result, encoding="utf-8")
                git_add_file(resource_path)
            return result

        return wrapper

    return decorator


def return_resource_content_on_fetch_error(
    resource_name: str,
) -> Callable[[Callable[P, str]], Callable[P, str]]:
    """Return content of a resource file if func raises a requests.HTTPError."""
    exceptions = (RequestException,)
    return return_resource_file_content_on_exceptions(
        resource_name,
        exceptions,
    )
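A hedged sketch of how the fetch-fallback decorator might be applied (the endpoint and the choice of the bundled LATEST_PYTHON_VERSION resource are illustrative assumptions, not taken from this diff):

# sketch: fall back to a bundled resource when a network fetch raises RequestException
import requests

from pyrig.dev.utils.resources import return_resource_content_on_fetch_error


@return_resource_content_on_fetch_error("LATEST_PYTHON_VERSION")
def fetch_latest_python_version() -> str:
    # illustrative endpoint; on RequestException the decorator returns the cached resource text
    resp = requests.get("https://endoflife.date/api/python.json", timeout=10)
    resp.raise_for_status()
    return resp.json()[0]["latest"]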
pyrig/dev/utils/testing.py
ADDED
@@ -0,0 +1,66 @@
"""Testing decorators and pytest mark utilities.

This module provides convenience decorators for defining pytest fixtures
with specific scopes and skip conditions. It simplifies common patterns
like creating autouse fixtures or skipping tests in CI environments.

Example:
    Using a scoped fixture decorator::

        @session_fixture
        def database_connection():
            return create_connection()

    Using an autouse fixture::

        @autouse_module_fixture
        def setup_logging():
            configure_logging()
"""

import functools

import pytest

from pyrig.src.git.git import running_in_github_actions

#: Skip marker for fixture tests that cannot be called directly.
skip_fixture_test: pytest.MarkDecorator = functools.partial(
    pytest.mark.skip,
    reason="Fixtures are not testable bc they cannot be called directly.",
)()

#: Skip marker for tests that cannot run in GitHub Actions.
skip_in_github_actions: pytest.MarkDecorator = functools.partial(
    pytest.mark.skipif,
    running_in_github_actions(),
    reason="Test cannot run in GitHub action.",
)()

#: Decorator for function-scoped fixtures.
function_fixture = functools.partial(pytest.fixture, scope="function")
#: Decorator for class-scoped fixtures.
class_fixture = functools.partial(pytest.fixture, scope="class")
#: Decorator for module-scoped fixtures.
module_fixture = functools.partial(pytest.fixture, scope="module")
#: Decorator for package-scoped fixtures.
package_fixture = functools.partial(pytest.fixture, scope="package")
#: Decorator for session-scoped fixtures.
session_fixture = functools.partial(pytest.fixture, scope="session")

#: Decorator for autouse function-scoped fixtures.
autouse_function_fixture = functools.partial(
    pytest.fixture, scope="function", autouse=True
)
#: Decorator for autouse class-scoped fixtures.
autouse_class_fixture = functools.partial(pytest.fixture, scope="class", autouse=True)
#: Decorator for autouse module-scoped fixtures.
autouse_module_fixture = functools.partial(pytest.fixture, scope="module", autouse=True)
#: Decorator for autouse package-scoped fixtures.
autouse_package_fixture = functools.partial(
    pytest.fixture, scope="package", autouse=True
)
#: Decorator for autouse session-scoped fixtures.
autouse_session_fixture = functools.partial(
    pytest.fixture, scope="session", autouse=True
)