pysfi 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pysfi-0.1.5.dist-info/METADATA +107 -0
- pysfi-0.1.5.dist-info/RECORD +19 -0
- pysfi-0.1.5.dist-info/WHEEL +4 -0
- pysfi-0.1.5.dist-info/entry_points.txt +11 -0
- sfi/__init__.py +3 -0
- sfi/alarmclock/__init__.py +0 -0
- sfi/alarmclock/alarmclock.py +367 -0
- sfi/bumpversion/__init__.py +3 -0
- sfi/bumpversion/bumpversion.py +535 -0
- sfi/embedinstall/embedinstall.py +418 -0
- sfi/filedate/__init__.py +0 -0
- sfi/filedate/filedate.py +112 -0
- sfi/makepython/__init__.py +0 -0
- sfi/makepython/makepython.py +310 -0
- sfi/projectparse/projectparse.py +152 -0
- sfi/pyloadergen/pyloadergen.py +995 -0
- sfi/pypacker/fspacker.py +91 -0
- sfi/taskkill/taskkill.py +236 -0
- sfi/which/which.py +74 -0
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import json
|
|
5
|
+
import logging
|
|
6
|
+
import os
|
|
7
|
+
import shutil
|
|
8
|
+
import subprocess
|
|
9
|
+
import sys
|
|
10
|
+
from dataclasses import dataclass
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
if sys.version_info >= (3, 11):
|
|
14
|
+
import tomllib
|
|
15
|
+
else:
|
|
16
|
+
import tomli as tomllib # type: ignore
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
# Platform flags; used to pick OS-specific persistence strategies below.
is_windows = sys.platform == "win32"
is_linux = sys.platform == "linux"
is_macos = sys.platform == "darwin"

# Message-only format keeps CLI output clean.
logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__)
# Directory the tool was invoked from; all commands run here.
cwd = Path.cwd()

# Build tools probed on PATH, in preference order.
_BUILD_COMMANDS = ["uv", "poetry", "hatch"]
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def parse_pyproject_toml(directory: Path) -> dict:
    """Read and parse ``pyproject.toml`` from *directory*.

    Returns the parsed TOML data, or an empty dict when the file is absent.
    """
    toml_path = directory / "pyproject.toml"
    if toml_path.is_file():
        with toml_path.open("rb") as fh:
            return tomllib.load(fh)

    logger.error(f"No pyproject.toml found in {directory}")
    return {}
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _get_build_command_from_toml(directory: Path) -> str | None:
    """Infer the build tool from the ``build-system`` table of pyproject.toml.

    Returns one of ``_BUILD_COMMANDS`` or None when the backend is missing
    or unrecognized.
    """
    logger.debug(f"Parsing pyproject.toml in {directory}")

    project_data = parse_pyproject_toml(directory)
    if not project_data:
        return None

    build_backend = project_data.get("build-system", {}).get("build-backend")
    if build_backend is None:
        logger.error("No `build-system` or `build-backend` found in pyproject.toml: ")
        logger.error(json.dumps(project_data, indent=2, ensure_ascii=False, sort_keys=True))
        return None

    # Map backend module prefixes to the CLI tool that drives them.
    if build_backend.startswith("poetry."):
        return "poetry"
    if build_backend.startswith("hatchling."):
        return "hatch"
    if build_backend.startswith("uv_build") or build_backend.startswith("uv."):
        # uv's native PEP 517 backend module is `uv_build`; without this case
        # a uv-backed project fell through to the "unknown backend" error
        # even though "uv" heads _BUILD_COMMANDS.
        return "uv"

    logger.error(f"Unknown build-backend: {build_backend}")
    return None
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _get_build_command(directory: Path) -> str | None:
    """Determine the build tool for *directory*.

    Prefers the ``build-backend`` declared in pyproject.toml; otherwise
    falls back to the first of ``_BUILD_COMMANDS`` found on PATH.
    Exits the process when nothing is found.
    """
    if (directory / "pyproject.toml").is_file():
        logger.debug(f"Found pyproject.toml in {directory}")
        return _get_build_command_from_toml(directory)

    for command in _BUILD_COMMANDS:
        if shutil.which(command):
            logger.debug(f"Found build command: {command}")
            return command

    logger.error(f"No build command found in {directory}")
    # Dead `return None` after sys.exit removed.
    sys.exit(1)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
@dataclass
class Command:
    """A subcommand together with its short alias."""

    name: str
    alias: str


_COMMANDS = [
    Command(name="build", alias="b"),
    Command(name="bumpversion", alias="bump"),
    Command(name="clean", alias="c"),
    Command(name="publish", alias="p"),
    Command(name="token", alias="tk"),
]
# Accept either form on the CLI: aliases first, then full names.
_CHOICES = [cmd.alias for cmd in _COMMANDS] + [cmd.name for cmd in _COMMANDS]
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def main():
    """CLI entry point: dispatch build/bump/clean/publish/token commands."""
    parser = argparse.ArgumentParser(description="Make Python")
    parser.add_argument("command", type=str, choices=_CHOICES, help=f"Command to run, options: {_CHOICES}")
    parser.add_argument("--debug", "-d", action="store_true", help="Enable debug mode")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    build_command = _get_build_command(cwd) or ""
    logger.info(f"Using build command: {build_command}")
    if args.command in {"build", "b"}:
        _run_command([build_command, "build"], cwd)
    elif args.command in {"bump", "bumpversion"}:
        _run_command(["bumpversion", "patch"], cwd)
    elif args.command in {"clean", "c"}:
        # `rm -rf dist build *.egg-info` run without a shell never expanded
        # the glob, and `rm` does not exist on Windows — remove the build
        # artifacts in-process instead.
        targets = [cwd / "dist", cwd / "build", *cwd.glob("*.egg-info")]
        for target in targets:
            shutil.rmtree(target, ignore_errors=True)
    elif args.command in {"publish", "p"}:
        if not _check_pypi_token(build_command):
            _set_token(build_command)
        _run_command([build_command, "publish"], cwd)
    elif args.command in {"token", "tk"}:
        _set_token(build_command)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def _set_token(build_command: str, show_header: bool = True) -> None:
    """Prompt for a PyPI token and persist it for *build_command*."""
    if show_header:
        logger.info(f"Setting PyPI token for {build_command}...")

    if build_command.lower() not in _BUILD_COMMANDS:
        logger.error(f"Unknown build command: {build_command}")
        logger.error(f"Please use `{'/'.join(_BUILD_COMMANDS)}`")
        sys.exit(1)

    token = input("Enter your PyPI token (leave empty to cancel): ").strip()
    if not token:
        logger.info("Invalid token, cancelled.")
        return

    # Dispatch on the exact command name; an exotic casing that passed the
    # lower() check above still lands in the error branch, as before.
    setters = {
        "uv": _set_uv_token,
        "poetry": _set_poetry_token,
        "hatch": _set_hatch_token,
    }
    setter = setters.get(build_command)
    if setter is None:
        logger.error(f"Unknown build command: {build_command}")
        sys.exit(1)
    setter(token)
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def _set_uv_token(token: str) -> None:
    """Persist the PyPI token for uv (env file plus uv.toml)."""
    _write_to_env_file("UV_PUBLISH_TOKEN", token)

    # NOTE(review): this rewrites the whole uv.toml, discarding any other
    # settings the user kept there — confirm that is acceptable.
    config_path = Path.home() / ".config" / "uv" / "uv.toml"
    config_path.parent.mkdir(parents=True, exist_ok=True)
    config_path.write_text(f"""[publish]
token = "{token}"
""")
    logger.info(f"Token saved to {config_path}")
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _set_poetry_token(token: str) -> None:
    """Set PyPI token for poetry.

    Persists the token twice: as POETRY_PYPI_TOKEN_PYPI in the shell
    environment, and in poetry's own config store.
    """
    _write_to_env_file("POETRY_PYPI_TOKEN_PYPI", token)
    # NOTE(review): this assumes `poetry` is on PATH; _run_command will
    # surface the failure otherwise.
    _run_command(["poetry", "config", "pypi-token.pypi", token], cwd)
    logger.info("Token saved to Poetry configuration.")
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def _set_hatch_token(token: str) -> None:
    """Persist the PyPI token for hatch by (re)writing ``~/.pypirc``."""
    # NOTE(review): overwrites any existing ~/.pypirc — confirm acceptable.
    lines = [
        "[pypi]",
        "repository = https://upload.pypi.org/legacy/",
        "username = __token__",
        f"password = {token}",
        "",
    ]
    pypirc_path = Path.home() / ".pypirc"
    pypirc_path.write_text("\n".join(lines))
    logger.info(f"Token saved to {pypirc_path}")
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def _check_pypi_token(build_command: str) -> bool:
    """Check whether a PyPI token appears to be configured for *build_command*.

    Best-effort: inspects environment variables and config files and returns
    True as soon as any likely token source is found.
    """
    logger.info("Checking PyPI token configuration...")

    token_found = False
    if build_command == "uv":
        # uv reads the publish token from the environment or its config file.
        token_env_vars = ["UV_PUBLISH_TOKEN", "PYPI_API_TOKEN"]
        for var in token_env_vars:
            if os.getenv(var):
                logger.info(f"Found PyPI token in environment variable: {var}")
                token_found = True
                break

        config_path = Path.home() / ".config" / "uv" / "uv.toml"
        if config_path.exists():
            logger.info(f"Found uv config file: {config_path}")
            token_found = True

    elif build_command == "poetry":
        if os.getenv("POETRY_PYPI_TOKEN_PYPI"):
            logger.info("Found PyPI token in POETRY_PYPI_TOKEN_PYPI environment variable")
            token_found = True

        # Ask poetry itself. Tolerate poetry being absent from PATH — the
        # build command may come from pyproject.toml, not `shutil.which`,
        # and the original code crashed with FileNotFoundError here.
        try:
            result = subprocess.run(
                ["poetry", "config", "pypi-token.pypi"],
                capture_output=True,
                text=True,
            )
        except OSError as e:
            logger.debug(f"Could not query poetry config: {e}")
        else:
            if result.stdout.strip() and result.stdout.strip() != "None":
                logger.info("Found PyPI token in Poetry configuration")
                token_found = True

    elif build_command == "hatch":
        # hatch publishes with ~/.pypirc credentials.
        pypirc_path = Path.home() / ".pypirc"
        if pypirc_path.exists():
            logger.info(f"Found .pypirc file: {pypirc_path}")
            token_found = True

    return token_found
|
|
227
|
+
|
|
228
|
+
|
|
229
|
+
def _run_command(cmd: list[str], directory: Path) -> None:
    """Run *cmd* in *directory*, echo its output, and exit on failure."""
    logger.debug(f"Running command: {' '.join(cmd)}")
    try:
        # check=True makes a non-zero exit raise CalledProcessError; without
        # it the original except branch was unreachable and failed commands
        # were silently ignored.
        result = subprocess.run(cmd, cwd=directory, capture_output=True, text=True, check=True)
    except subprocess.CalledProcessError as e:
        # Surface whatever the command printed before reporting the failure.
        if e.stdout:
            print(e.stdout)
        if e.stderr:
            print(e.stderr, file=sys.stderr)
        logger.error(f"Command failed with exit code {e.returncode}")
        sys.exit(e.returncode)
    except FileNotFoundError:
        logger.error(f"Command not found: {cmd[0]}")
        sys.exit(1)

    if result.stdout:
        print(result.stdout)
    if result.stderr:
        print(result.stderr, file=sys.stderr)
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def _write_to_env_file(key: str, value: str) -> None:
    """Persist *key*=*value* for future shells.

    Uses ``setx`` on Windows and the shell rc file elsewhere.
    """
    if is_windows:
        # An argv list must not be combined with shell=True: the list would
        # be re-joined and re-parsed by cmd.exe, mangling values containing
        # spaces or special characters. Run setx directly instead.
        subprocess.run(["setx", key, value])
    else:
        _write_to_shell_config(f"export {key}='{value}'")
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def _get_shell_config_path() -> Path:
|
|
253
|
+
"""Get the appropriate shell config file based on the current shell."""
|
|
254
|
+
# Try to detect the shell
|
|
255
|
+
shell = os.getenv("SHELL", "")
|
|
256
|
+
if "zsh" in shell:
|
|
257
|
+
return Path.home() / ".zshrc"
|
|
258
|
+
else:
|
|
259
|
+
# Default to .bashrc
|
|
260
|
+
return Path.home() / ".bashrc"
|
|
261
|
+
|
|
262
|
+
|
|
263
|
+
def _write_to_shell_config(content: str) -> None:
    """Write an ``export VAR=value`` statement to the user's shell rc file,
    replacing any existing export of the same variable.

    The original docstring carried a stale note deprecating this function
    in favor of itself (leftover from a rename); removed.
    """
    config_path = _get_shell_config_path()
    if not config_path.exists():
        logger.warning(f"{config_path} does not exist, creating it...")
        config_path.touch()

    # Extract the variable name from the export statement.
    # Expected format: export VARIABLE_NAME=value
    var_name = None
    for line in content.strip().split("\n"):
        if line.startswith("export ") and "=" in line:
            var_name = line.split("=")[0].replace("export ", "").strip()
            break

    if not var_name:
        logger.error("Invalid export statement format. Expected: export VARIABLE_NAME=value")
        return

    existing_lines = config_path.read_text(encoding="utf-8").split("\n")

    # Drop any previous export of the same variable so the file keeps one copy.
    new_lines = []
    found_existing = False
    for line in existing_lines:
        stripped = line.strip()
        if stripped.startswith(f"export {var_name}=") or stripped.startswith(f"export {var_name} ="):
            found_existing = True
            continue
        new_lines.append(line)

    if found_existing:
        logger.info(f"Found existing export statement for {var_name}, replacing it...")

    new_lines.append(content.strip())

    # Keep a trailing newline — POSIX text tools expect rc files to end with
    # one, and the original write dropped it.
    text = "\n".join(new_lines)
    if not text.endswith("\n"):
        text += "\n"
    config_path.write_text(text, encoding="utf-8")

    logger.info(f"Content written to {config_path}")
    logger.info(f"Run `source {config_path}` to apply the changes")
    logger.info(f"Run `cat {config_path}` to view the content")
    logger.info(f"Run `cat {config_path} | grep 'export'` to view the exported variables")
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
"""Parse pyproject.toml files in directory, supports multiple projects."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import argparse
|
|
6
|
+
import json
|
|
7
|
+
import logging
|
|
8
|
+
import sys
|
|
9
|
+
import time
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
if sys.version_info >= (3, 11):
|
|
13
|
+
import tomllib
|
|
14
|
+
else:
|
|
15
|
+
import tomli as tomllib # type: ignore
|
|
16
|
+
|
|
17
|
+
# Public API of this module; everything else is an implementation detail.
__all__ = ["parse_project_data"]


# Message-only format keeps CLI output clean.
logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__)
# Directory the tool was invoked from; default parse target and output root.
cwd = Path.cwd()
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def parse_project_data(directory: Path, recursive: bool = False) -> dict:
    """Parse pyproject.toml file(s) in *directory* and return project data.

    Returns:
        dict: Extracted project information, empty when nothing was parsed.
    """
    raw = _parse_pyproject(directory, recursive=recursive)
    return _extract_project_info(raw) if raw else {}
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _parse_pyproject(directory: Path, recursive: bool = False) -> dict[str, dict]:
    """Parse pyproject.toml file(s) and return raw TOML data keyed by project dir.

    In recursive mode, when two project directories share the same stem the
    relative path is used as the key instead — previously the second project
    silently overwrote the first in the result dict.
    """
    data: dict[str, dict] = {}
    if recursive:
        for pyproject_path in directory.rglob("pyproject.toml"):
            key = pyproject_path.parent.stem
            if key in data:
                # Disambiguate duplicate directory names.
                key = str(pyproject_path.parent.relative_to(directory))
            with pyproject_path.open("rb") as f:
                data[key] = tomllib.load(f)
    else:
        pyproject_path = directory / "pyproject.toml"
        if not pyproject_path.is_file():
            logger.error(f"No pyproject.toml found in {directory}")
            return {}

        with pyproject_path.open("rb") as f:
            logger.debug(f"Parsing {pyproject_path}")
            data[pyproject_path.parent.stem] = tomllib.load(f)

    logger.debug(f"Parsed {len(data)} pyproject.toml files, data: {data}")
    return data
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _extract_project_info(data: dict) -> dict:
    """Extract commonly used project information from parsed data."""
    if not data:
        logger.error("No data to extract")
        return {}

    project_info: dict = {}
    for key, value in data.items():
        if "project" not in value:
            logger.warning(f"No project information found in {key}")
            project_info.setdefault(key, {})
            continue

        project = value.get("project", {})
        build_system = value.get("build-system", {})
        # (output key, [project] key, default) — insertion order matches the
        # original literal; defaults are created per-project, never shared.
        field_spec = [
            ("name", "name", None),
            ("version", "version", None),
            ("description", "description", None),
            ("readme", "readme", None),
            ("requires_python", "requires-python", None),
            ("dependencies", "dependencies", []),
            ("optional_dependencies", "optional-dependencies", {}),
            ("scripts", "scripts", {}),
            ("entry_points", "entry-points", {}),
            ("authors", "authors", []),
            ("license", "license", None),
            ("keywords", "keywords", []),
            ("classifiers", "classifiers", []),
            ("urls", "urls", {}),
        ]
        info = {out: project.get(src, default) for out, src, default in field_spec}
        info["build_backend"] = build_system.get("build-backend")
        info["requires"] = build_system.get("requires", [])
        project_info.setdefault(key, info)

    logger.debug(f"Extracted {len(project_info)} projects, info: {project_info}")
    return project_info
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def _check_directory(directory: str) -> bool:
|
|
99
|
+
"""Check if directory is valid."""
|
|
100
|
+
if not directory:
|
|
101
|
+
logger.error("Error: No directory specified")
|
|
102
|
+
return False
|
|
103
|
+
|
|
104
|
+
dir_path = Path(directory)
|
|
105
|
+
if not dir_path.is_dir():
|
|
106
|
+
logger.error(f"Error: {dir_path} is not a directory")
|
|
107
|
+
return False
|
|
108
|
+
|
|
109
|
+
return True
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def main():
    """CLI entry point: parse pyproject.toml data and cache it as JSON."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--directory", "-D", type=str, default=str(cwd), help="Directory to parse")
    parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
    parser.add_argument("--recursive", "-r", action="store_true", help="Recursively parse subdirectories")
    parser.add_argument("--show", "-s", action="store_true", help="Show parsed data")
    parser.add_argument("--output", "-o", type=str, default="projects.json", help="Output file path")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    if not _check_directory(args.directory):
        return

    output_path = (cwd / args.output).with_suffix(".json")

    # --show prefers the cached JSON when present; otherwise fall through to
    # a fresh parse.
    if args.show and output_path.is_file():
        logger.info(f"Loading output from `{output_path}`:")
        with output_path.open("r", encoding="utf-8") as f:
            cached = json.load(f)
        logger.info(json.dumps(cached, indent=2, ensure_ascii=False, sort_keys=True))
        return
    if args.show:
        logger.debug(f"No json file found at {output_path}, continue parsing...")

    t0 = time.perf_counter()
    logger.info(f"Parsing pyproject.toml in {args.directory}")
    output_data = parse_project_data(Path(args.directory), recursive=args.recursive)
    if args.show:
        logger.info(json.dumps(output_data, indent=2, ensure_ascii=False, sort_keys=True))
        return

    try:
        with output_path.open("w", encoding="utf-8") as f:
            json.dump(output_data, f, indent=2, ensure_ascii=False)
    except Exception as e:
        logger.error(f"Error writing output to {output_path}: {e}")
    else:
        logger.info(f"Output written to {output_path}, took {time.perf_counter() - t0:.4f}s")
|