proj-flow 0.8.1-py3-none-any.whl → 0.9.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- proj_flow/__init__.py +1 -1
- proj_flow/__main__.py +2 -2
- proj_flow/api/__init__.py +11 -2
- proj_flow/api/arg.py +14 -6
- proj_flow/api/env.py +15 -35
- proj_flow/api/release.py +99 -0
- proj_flow/api/step.py +12 -2
- proj_flow/base/__init__.py +2 -2
- proj_flow/base/inspect.py +15 -44
- proj_flow/base/plugins.py +41 -2
- proj_flow/base/registry.py +105 -0
- proj_flow/cli/__init__.py +55 -0
- proj_flow/cli/argument.py +450 -0
- proj_flow/{flow/cli → cli}/finder.py +1 -1
- proj_flow/ext/__init__.py +6 -0
- proj_flow/ext/github/__init__.py +11 -0
- proj_flow/ext/github/cli.py +125 -0
- proj_flow/ext/github/hosting.py +19 -0
- proj_flow/ext/markdown_changelist.py +14 -0
- proj_flow/ext/python/__init__.py +10 -0
- proj_flow/ext/python/rtdocs.py +238 -0
- proj_flow/ext/python/steps.py +71 -0
- proj_flow/ext/python/version.py +98 -0
- proj_flow/ext/re_structured_changelist.py +14 -0
- proj_flow/flow/__init__.py +3 -3
- proj_flow/flow/configs.py +21 -5
- proj_flow/flow/dependency.py +8 -6
- proj_flow/flow/steps.py +6 -9
- proj_flow/log/__init__.py +10 -2
- proj_flow/log/commit.py +19 -4
- proj_flow/log/error.py +31 -0
- proj_flow/log/hosting/github.py +10 -6
- proj_flow/log/msg.py +23 -0
- proj_flow/log/release.py +112 -21
- proj_flow/log/rich_text/__init__.py +0 -12
- proj_flow/log/rich_text/api.py +10 -4
- proj_flow/minimal/__init__.py +11 -0
- proj_flow/{plugins/commands → minimal}/bootstrap.py +2 -2
- proj_flow/{plugins/commands → minimal}/list.py +12 -10
- proj_flow/{plugins/commands → minimal}/run.py +20 -11
- proj_flow/{plugins/commands → minimal}/system.py +2 -2
- proj_flow/plugins/__init__.py +1 -1
- proj_flow/template/layers/base/.flow/matrix.yml +1 -1
- proj_flow/template/layers/cmake/CMakeLists.txt.mustache +1 -1
- proj_flow/template/layers/github_actions/.github/workflows/build.yml +1 -1
- proj_flow/template/layers/github_social/.github/ISSUE_TEMPLATE/feature_request.md.mustache +1 -1
- {proj_flow-0.8.1.dist-info → proj_flow-0.9.1.dist-info}/METADATA +6 -5
- {proj_flow-0.8.1.dist-info → proj_flow-0.9.1.dist-info}/RECORD +51 -37
- proj_flow-0.9.1.dist-info/entry_points.txt +2 -0
- proj_flow/flow/cli/__init__.py +0 -66
- proj_flow/flow/cli/cmds.py +0 -385
- proj_flow-0.8.1.dist-info/entry_points.txt +0 -2
- {proj_flow-0.8.1.dist-info → proj_flow-0.9.1.dist-info}/WHEEL +0 -0
- {proj_flow-0.8.1.dist-info → proj_flow-0.9.1.dist-info}/licenses/LICENSE +0 -0
proj_flow/ext/python/rtdocs.py ADDED

```diff
@@ -0,0 +1,238 @@
+# Copyright (c) 2025 Marcin Zdun
+# This code is licensed under MIT license (see LICENSE for details)
+
+"""
+The **proj_flow.ext.python.steps.rtdocs** defines RTDocs step, which uses
+.readthedocs.yaml to build the HTML documentation.
+"""
+
+import os
+import shutil
+import subprocess
+import sys
+from abc import ABC, abstractmethod
+from contextlib import contextmanager
+from typing import Any, Callable, Dict, List, Optional, cast
+
+from proj_flow.api import env, step
+
+
+@step.register
+class RTDocs:
+    name = "RTD"
+
+    def platform_dependencies(self):
+        return ["python -m PyYAML"]
+
+    def is_active(self, config: env.Config, rt: env.Runtime) -> bool:
+        return os.path.isfile(os.path.join(rt.root, ".readthedocs.yaml"))
+
+    def run(self, config: env.Config, rt: env.Runtime) -> int:
+        import venv
+
+        import yaml
+
+        with open(os.path.join(rt.root, ".readthedocs.yaml")) as rtd_yaml:
+            data = yaml.load(rtd_yaml, Loader=yaml.Loader)
+
+        formats = ["html"]
+
+        build_jobs = cast(
+            Dict[str, List[str]], data.get("build", {}).get("jobs", {})
+        )
+
+        sphinx_configuration = cast(
+            Optional[str], data.get("sphinx", {}).get("configuration")
+        )
+
+        python_install = cast(
+            List[Dict[str, Any]], data.get("python", {}).get("install", [])
+        )
+
+        builder: Optional[Builder] = None
+
+        if sphinx_configuration:
+            sphinx_configuration = os.path.join(rt.root, sphinx_configuration)
+            builder = Sphinx(sphinx_configuration)
+
+        READTHEDOCS_OUTPUT = (
+            builder.READTHEDOCS_OUTPUT
+            if builder is not None
+            else os.path.join(rt.root, "docs/build")
+        )
+        os.environ["READTHEDOCS_OUTPUT"] = READTHEDOCS_OUTPUT
+        os.environ["READTHEDOCS"] = "True"
+
+        jobs: Dict[str, Callable[[], int]] = {
+            "create_environment": lambda: _activate_virtual_env(
+                venv, os.path.dirname(READTHEDOCS_OUTPUT)
+            ),
+        }
+        if len(python_install):
+            jobs["install"] = lambda: _install(python_install)
+
+        if builder:
+            for format in formats:
+                jobs[f"build/{format}"] = builder.wrap(format)
+
+        for name in build_jobs:
+            if name != "build":
+                jobs[name] = lambda: _script(build_jobs[name])
+                continue
+
+            build_jobs_build = cast(Dict[str, List[str]], build_jobs["build"])
+            for format in formats:
+                if format not in build_jobs_build:
+                    continue
+                jobs[f"build/{format}"] = lambda: _script(build_jobs_build[name])
+
+        for job in _job_listing:
+            try:
+                impl = jobs[job]
+            except KeyError:
+                continue
+            print(f"-- {job}")
+            result = impl()
+            if result:
+                return 1
+        return 0
+
+
+class Builder(ABC):
+    @property
+    @abstractmethod
+    def READTHEDOCS_OUTPUT(self) -> str: ...
+
+    @abstractmethod
+    def build(self, target: str) -> int: ...
+
+    def wrap(self, target: str) -> Callable[[], int]:
+        return lambda: self.build(target)
+
+
+class Sphinx(Builder):
+    READTHEDOCS_OUTPUT: str = ""
+
+    def __init__(self, config: str):
+        self.config = config
+        self.source = os.path.dirname(config)
+        self.READTHEDOCS_OUTPUT = os.path.join(os.path.dirname(self.source), "build")
+
+    def build(self, target: str):
+        builder = "latex" if target == "pdf" else target
+        return subprocess.run(
+            ["sphinx-build", "-M", builder, self.source, self.READTHEDOCS_OUTPUT],
+            shell=False,
+        ).returncode
+
+
+PYTHON_EXECUTABLE = sys.executable
+
+
+def _python(
+    *args: str,
+    module: Optional[str] = None,
+    capture_output: bool = True,
+) -> subprocess.CompletedProcess:
+    if module is not None:
+        return subprocess.run(
+            [PYTHON_EXECUTABLE, "-m", module, *args],
+            shell=False,
+            capture_output=capture_output,
+        )
+    return subprocess.run(
+        [PYTHON_EXECUTABLE, *args], shell=False, capture_output=capture_output
+    )
+
+
+def _pip(*args: str, capture_output: bool = False):
+    return _python(*args, module="pip", capture_output=capture_output)
+
+
+_build_targets = [
+    "html",
+    "htmlzip",
+    "pdf",
+    "epub",
+]
+
+_job_listing = [
+    # "post_checkout",
+    # "pre_system_dependencies",
+    # "post_system_dependencies",
+    "pre_create_environment",
+    "create_environment",
+    "post_create_environment",
+    "pre_install",
+    "install",
+    "post_install",
+    "pre_build",
+    *(f"build/{tgt}" for tgt in _build_targets),
+    "post_build",
+]
+
+
+@contextmanager
+def _cd(path: str):
+    prev = os.getcwd()
+    os.chdir(path)
+    try:
+        yield
+    finally:
+        os.chdir(prev)
+
+
+def _get_venv_path(root: str):
+    bindir = os.path.join(".venv", "bin")
+    scripts = os.path.join(".venv", "Scripts")
+
+    if os.path.isdir(os.path.join(root, bindir)):
+        return bindir
+
+    if os.path.isdir(os.path.join(root, scripts)):
+        return scripts
+
+    return None
+
+
+def _activate_virtual_env(venv, root: str):
+    global PYTHON_EXECUTABLE
+
+    with _cd(root):
+        exec_ext = ".exe" if sys.platform == "win32" else ""
+        python_exec = f"python{exec_ext}"
+        bindir = _get_venv_path(root)
+        has_venv = bindir is not None and os.path.isfile(
+            os.path.join(bindir, python_exec)
+        )
+
+        if not has_venv:
+            venv.create(".venv", with_pip=True, upgrade_deps=True)
+            bindir = _get_venv_path(root)
+
+        if bindir:
+            PATH = f"{os.path.abspath(bindir)}{os.pathsep}{os.environ['PATH']}"
+            os.environ["PATH"] = PATH
+            PYTHON_EXECUTABLE = shutil.which("python") or sys.executable
+    return 0
+
+
+def _install(deps: List[Dict[str, Any]]):
+    for dep in deps:
+        try:
+            requirements = dep["requirements"]
+        except KeyError:
+            continue
+
+        result = _pip("install", "-q", "-r", requirements).returncode
+        if result:
+            return result
+    return 0
+
+
+def _script(calls: List[str]):
+    for call in calls:
+        result = subprocess.run(call, shell=True).returncode
+        if result:
+            return result
+    return 0
```
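For orientation, `RTDocs.run` above consults only a few keys of `.readthedocs.yaml`. A sketch of the shape it expects, written as the dict `yaml.load` would return; the paths and the job script are illustrative, not taken from the package:

```python
# Illustrative only: the keys RTDocs.run reads from .readthedocs.yaml.
data = {
    # selects the Sphinx builder and the docs source directory
    "sphinx": {"configuration": "docs/conf.py"},
    # each entry with a "requirements" key becomes `pip install -q -r <file>`
    "python": {"install": [{"requirements": "docs/requirements.txt"}]},
    # extra job scripts are run with shell=True; a "build" entry may map formats to scripts
    "build": {"jobs": {"pre_build": ["python tools/gen_api_docs.py"]}},
}
```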
proj_flow/ext/python/steps.py ADDED

```diff
@@ -0,0 +1,71 @@
+# Copyright (c) 2025 Marcin Zdun
+# This code is licensed under MIT license (see LICENSE for details)
+
+"""
+The **proj_flow.ext.python.steps** defines steps for building, installing and
+documenting.
+"""
+
+import importlib
+import os
+
+from proj_flow.api import env, release, step
+
+from . import rtdocs
+
+
+@step.register
+class Install:
+    name = "Install"
+
+    def platform_dependencies(self):
+        return ["python -m pip"]
+
+    def run(self, config: env.Config, rt: env.Runtime) -> int:
+        return rt.cmd("python", "-m", "pip", "install", rt.root)
+
+
+@step.register
+class Build:
+    name = "Build"
+
+    def platform_dependencies(self):
+        return ["python -m build"]
+
+    def run(self, config: env.Config, rt: env.Runtime) -> int:
+        build_main = importlib.import_module("build.__main__")
+        build_main.main([], "proj-flow build")
+        return 0
+
+
+@step.register
+class CheckTwine:
+    name = "Check Twine"
+
+    runs_after = ["Build"]
+
+    def platform_dependencies(self):
+        return ["twine"]
+
+    def run(self, config: env.Config, rt: env.Runtime) -> int:
+        filenames = []
+        for root, dirnames, filenames in os.walk("dist"):
+            dirnames[:] = []
+
+        _, project = release.project_suites.find(lambda suite: suite.get_project(rt))
+        archive_name = project and project.archive_name
+        if archive_name:
+            dot_suffix = f"{archive_name}."
+            dash_suffix = f"{archive_name}-"
+            filenames = [
+                filename
+                for filename in filenames
+                if filename.startswith(dot_suffix) or filename.startswith(dash_suffix)
+            ]
+
+        if len(filenames) == 0:
+            return 0
+
+        return rt.cmd(
+            "twine", "check", *(os.path.join(root, filename) for filename in filenames)
+        )
```
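All three steps follow the same duck-typed protocol that `@step.register` expects: a `name`, optional `runs_after`, `platform_dependencies()` and `run()`. A hedged sketch of a project-local step built on the same hooks; the step itself is hypothetical and not part of this release:

```python
from proj_flow.api import env, step


@step.register
class SdistCheck:  # hypothetical example step
    name = "Sdist Check"
    runs_after = ["Build"]  # ordering hint, as CheckTwine uses above

    def platform_dependencies(self):
        return ["python -m build"]

    def run(self, config: env.Config, rt: env.Runtime) -> int:
        # rt.cmd returns the command's exit code, as in the Install step above
        return rt.cmd("python", "-m", "build", "--sdist", rt.root)
```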
proj_flow/ext/python/version.py ADDED

```diff
@@ -0,0 +1,98 @@
+# Copyright (c) 2025 Marcin Zdun
+# This code is licensed under MIT license (see LICENSE for details)
+
+"""
+The **proj_flow.ext.python.steps** defines steps for building, installing and
+documenting.
+"""
+
+import os
+import re
+from typing import NamedTuple, Optional
+
+import toml
+
+from proj_flow.api import env, release
+
+
+class QuickProjectInfo(NamedTuple):
+    name: Optional[str] = None
+    path: Optional[str] = None
+    pattern: Optional[str] = None
+
+
+@release.project_suites.add
+class ProjectSuite(release.ProjectSuite):
+    def get_project(self, rt: env.Runtime) -> Optional[release.Project]:
+        name, path, pattern = self._pyproject_hatch(rt)
+        if name is None or path is None:
+            return None
+
+        try:
+            with open(os.path.join(rt.root, path), encoding="UTF-8") as infile:
+                text = infile.read()
+        except FileNotFoundError:
+            return None
+
+        if pattern is not None:
+            candidate = self._check(text, pattern)
+            return release.Project(name, candidate) if candidate else None
+
+        for varname in ["__version__", "VERSION"]:
+            for end in ['"', "'"]:
+                pattern = rf"{varname}\s*=\s*{end}v?(?P<version>[^{end}]+){end}"
+                candidate = self._check(text, pattern)
+                if candidate:
+                    return release.Project(name, candidate)
+
+        return None
+
+    def _check(self, text: str, pattern: str):
+        m = re.search(f"^{pattern}", text, flags=re.MULTILINE)
+        if not m:
+            return None
+        version, start = m.group("version"), m.start("version")
+        core = re.split(r"([0-9]+\.[0-9]+\.[0-9]+)", version, maxsplit=1)[1]
+        stability = version[len(core) :]
+
+        return release.Version(
+            release.Arg(core, start), release.Arg(stability, start + len(core))
+        )
+
+    def get_version_file_path(self, rt: env.Runtime) -> Optional[str]:
+        _, path, _ = self._pyproject_hatch(rt)
+        return path
+
+    def _pyproject_hatch(self, rt: env.Runtime):
+        pyproject_path = os.path.join(rt.root, "pyproject.toml")
+        try:
+            data = toml.load(pyproject_path)
+            project = data.get("project", {})
+            hatch = data.get("tool", {}).get("hatch", {})
+            wheels = (
+                hatch.get("build", {})
+                .get("targets", {})
+                .get("wheel", {})
+                .get("packages", [])
+            )
+
+            name = project.get("name")
+            if len(wheels) > 0:
+                first_wheel = wheels[0].split("/")[-1]
+                if first_wheel:
+                    name = first_wheel
+
+            dynamic = project.get("dynamic", [])
+            if "version" in dynamic:
+                version_dict = hatch.get("version", {})
+                return QuickProjectInfo(
+                    name=name,
+                    path=version_dict.get("path"),
+                    pattern=version_dict.get("pattern"),
+                )
+            return QuickProjectInfo(
+                name=name,
+                path="pyproject.toml",
+            )
+        except FileNotFoundError:
+            return QuickProjectInfo()
```
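As a reading aid, `_pyproject_hatch` above expects a hatch-style `pyproject.toml` roughly like the following, shown as the dict `toml.load` returns; the package name and paths are illustrative only:

```python
# Illustrative only: the pyproject.toml fields ProjectSuite inspects.
data = {
    "project": {"name": "my-pkg", "dynamic": ["version"]},
    "tool": {
        "hatch": {
            # file (and optional regex) that holds the version string
            "version": {"path": "src/my_pkg/__init__.py"},
            "build": {"targets": {"wheel": {"packages": ["src/my_pkg"]}}},
        }
    },
}
```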
proj_flow/ext/re_structured_changelist.py ADDED

```diff
@@ -0,0 +1,14 @@
+# Copyright (c) 2025 Marcin Zdun
+# This code is licensed under MIT license (see LICENSE for details)
+
+"""
+The **proj_flow.ext.re_structured_text** .
+"""
+
+from proj_flow.log.rich_text.api import changelog_generators
+from proj_flow.log.rich_text.re_structured_text import ChangelogGenerator
+
+
+@changelog_generators.add
+class Plugin(ChangelogGenerator):
+    pass
```
proj_flow/flow/__init__.py CHANGED

```diff
@@ -2,10 +2,10 @@
 # This code is licensed under MIT license (see LICENSE for details)
 
 """
-The **proj_flow.flow** contains the inner workings of various *
+The **proj_flow.flow** contains the inner workings of various *Project Flow*
 components.
 """
 
-from . import
+from . import configs, dependency, init, interact, layer, steps
 
-__all__ = ["
+__all__ = ["configs", "dependency", "init", "interact", "layer", "steps"]
```
proj_flow/flow/configs.py CHANGED

```diff
@@ -17,7 +17,9 @@ from proj_flow.base import matrix
 
 
 def _compiler_inner(
-    value: str,
+    value: str,
+    used_compilers: Dict[str, List[List[str]]],
+    config_names: Dict[str, List[str]],
 ):
     compiler, names = matrix.find_compiler(value, config_names)
     if compiler not in used_compilers:
@@ -26,7 +28,9 @@ def _compiler_inner(
     return compiler
 
 
-def _compiler(
+def _compiler(
+    used_compilers: Dict[str, List[List[str]]], config_names: Dict[str, List[str]]
+):
     return lambda value: _compiler_inner(value, used_compilers, config_names)
 
 
@@ -43,12 +47,15 @@ _TRUE = {"true", "on", "yes", "1"}
 _boolean_sanitizer = _boolean("with-sanitizer")
 
 
-def _types(
+def _types(
+    used_compilers: Dict[str, List[List[str]]], config_names: Dict[str, List[str]]
+):
     return {
         "compiler": _compiler(used_compilers, config_names),
        "sanitizer": _boolean_sanitizer,
     }
 
+
 def _config(config: List[str], only_host: bool, types: Dict[str, Callable[[str], Any]]):
     args = {}
     for arg in config:
@@ -112,10 +119,15 @@ def _load_flow_data(rt: env.Runtime):
 class Configs:
     usable: List[env.Config] = []
 
-    def __init__(
-
+    def __init__(
+        self, rt: env.Runtime, args: argparse.Namespace, expand_compilers=True
+    ):
         configs, keys = _load_flow_data(rt)
 
+        if len(configs) == 0 and len(keys) == 0:
+            self.usable = [env.Config({}, keys)]
+            return
+
         used_compilers: Dict[str, List[List[str]]] = {}
 
         types = _types(used_compilers=used_compilers, config_names=rt.compiler_names)
@@ -140,6 +152,10 @@ class Configs:
             or not matrix.matches_any(config, postproc_exclude)
         ]
 
+        if not expand_compilers:
+            self.usable = [env.Config(conf, keys) for conf in usable]
+            return
+
         self.usable = []
         for conf in usable:
             try:
```
proj_flow/flow/dependency.py CHANGED

```diff
@@ -10,7 +10,7 @@ import sys
 from dataclasses import dataclass
 from enum import Enum
 from importlib.metadata import version as package_version
-from typing import Callable, List, Set, Tuple
+from typing import Callable, List, Set, Tuple, cast
 
 from proj_flow.base import cmd
 
@@ -27,7 +27,7 @@ def _ver(ver: str) -> Version:
     chunks = [int(v.strip()) for v in ver.split(".")]
     while len(chunks) < 3:
         chunks.append(0)
-    return (*chunks[:3],)
+    return cast(Tuple[int, int, int], (*chunks[:3],))
 
 
 @dataclass
@@ -121,8 +121,8 @@ def verify(deps: List[Dependency]):
     for pkg in (dep for dep in uniq if dep.kind == DepKind.PYTHON_PKG):
         try:
             version = package_version(pkg.name)
-        except:
-            errors.add(f"{pkg.name}: Python package is missing")
+        except Exception as ex:
+            errors.add(f"{pkg.name}: Python package is missing: {ex}")
             continue
         msg = pkg.match_version(version)
         if msg is not None:
@@ -133,7 +133,9 @@ def verify(deps: List[Dependency]):
             errors.add(f"{app.name}: tool is missing")
             continue
         proc = cmd.run(app.name, "--version", capture_output=True)
-        if proc
+        if not proc:
+            version = None
+        elif proc.returncode:
             if proc.stderr:
                 print(proc.stderr.rstrip(), file=sys.stderr)
             version = None
@@ -146,7 +148,7 @@ def verify(deps: List[Dependency]):
                 f"{app.name}: could not read version for `{app.version_expression}`"
             )
             continue
-        msg = app.match_version(version)
+        msg = version and app.match_version(version)
         if msg is not None:
             errors.add(msg)
 
```
proj_flow/flow/steps.py CHANGED

```diff
@@ -6,15 +6,10 @@ The **proj_flow.flow.steps** allows loading both predefined and project-specific
 steps.
 """
 
-import importlib
-import os
-import sys
 from dataclasses import dataclass
-from
-from typing import List, Optional, Union, cast
+from typing import List, cast
 
 from proj_flow.api import env, step
-from proj_flow.base.plugins import load_module_plugins
 
 
 @dataclass
@@ -43,6 +38,7 @@ def _sort_steps():
                 if successor.name != name:
                     continue
                 successor.runs_after.append(plugin.name)
+                break
 
     for plugin in unsorted:
         runs_after: List[str] = []
@@ -71,12 +67,14 @@ def _sort_steps():
 
 
 def clean_aliases(cfg: env.FlowConfig):
+    cfg_steps = _sort_steps()
+    cfg.steps = cfg_steps
+
     entries = cfg.entry
     if not entries:
         return
 
-
-    step_names = {step.name for step in valid_steps}
+    step_names = {step.name for step in cfg_steps}
 
     keys_to_remove: List[str] = []
     for key in entries:
@@ -100,5 +98,4 @@ def clean_aliases(cfg: env.FlowConfig):
     for key in keys_to_remove:
         del entries[key]
 
-    cfg.steps = valid_steps
     cfg.aliases = [env.RunAlias.from_json(key, entries[key]) for key in entries]
```
proj_flow/log/__init__.py CHANGED

```diff
@@ -5,6 +5,14 @@
 The **proj_flow.log** defines tools for generating various changelog views.
 """
 
-from . import commit, fmt, hosting, msg, release, rich_text
+from . import commit, error, fmt, hosting, msg, release, rich_text
 
-__all__ = [
+__all__ = [
+    "commit",
+    "error",
+    "fmt",
+    "hosting",
+    "msg",
+    "release",
+    "rich_text",
+]
```
proj_flow/log/commit.py CHANGED

```diff
@@ -16,6 +16,7 @@ from enum import Enum
 from typing import Dict, List, NamedTuple, Optional, Tuple
 
 from proj_flow.api import env
+from proj_flow.base import registry
 
 COMMIT_SEP = "--{}".format(
     "".join(secrets.choice(string.ascii_letters + string.digits) for i in range(20))
@@ -227,8 +228,12 @@ class Hosting(ABC):
         ...
 
     @abstractmethod
-    def
-        self,
+    def add_release(
+        self,
+        log: ChangeLog,
+        setup: "LogSetup",
+        git: "Git",
+        draft: bool,
     ) -> ReleaseInfo:
         """
         Publish a release for current setup, putting the log into release
@@ -258,8 +263,8 @@ class NoHosting(Hosting):
     def reference_link(self, ref: str) -> Optional[str]:
         return None
 
-    def
-        self, log: ChangeLog, setup: "LogSetup", git: "Git"
+    def add_release(
+        self, log: ChangeLog, setup: "LogSetup", git: "Git", draft: bool
     ) -> ReleaseInfo:
         return ReleaseInfo(draft_url=None)
 
@@ -461,3 +466,13 @@ class Git:
 
     def push_with_refs(self, remote: str, branch: str):
         return self.cmd("push", remote, branch, "--follow-tags", "--force-with-lease")
+
+
+class HostingFactory(ABC):
+    @abstractmethod
+    def from_repo(
+        self, git: Git, remote: Optional[str] = None
+    ) -> Optional[Hosting]: ...
+
+
+hosting_factories = registry.Registry[HostingFactory]("HostingFactory")
```
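The new `HostingFactory` registry gives hosting back ends (such as the new `proj_flow.ext.github.hosting`) a single place to plug in. A minimal sketch of registering a factory, assuming `registry.Registry` exposes the same `.add` decorator the other registries in this release use; the GitLab class is hypothetical and not part of proj-flow:

```python
from typing import Optional

from proj_flow.log import commit


@commit.hosting_factories.add
class GitLabFactory(commit.HostingFactory):  # hypothetical example
    def from_repo(
        self, git: commit.Git, remote: Optional[str] = None
    ) -> Optional[commit.Hosting]:
        # Return a Hosting for remotes this factory recognises,
        # or None so the next registered factory can try.
        return None
```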
proj_flow/log/error.py ADDED

```diff
@@ -0,0 +1,31 @@
+# Copyright (c) 2024 Marcin Zdun
+# This code is licensed under MIT license (see LICENSE for details)
+
+"""
+The **proj_flow.log.error** declares a number of exceptions, which can be
+raised during the changelog operations.
+"""
+
+
+class ReleaseError(Exception):
+    def __init__(self, message: str):
+        super().__init__()
+        self.message = message
+
+
+class NoProjectError(ReleaseError):
+    def __init__(self):
+        super().__init__("No project definition found.")
+
+
+class TagExistsError(ReleaseError):
+    def __init__(self, tag: str):
+        super().__init__(f"Tag {tag}] already exists.")
+        self.tag = tag
+
+
+class VersionNotAdvancing(Exception):
+    def __init__(self, version: str):
+        super().__init__()
+        self.message = f"[{version}] Version did not change, not doing anything."
+        self.version = version
```