pepscript 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pepscript/__init__.py +29 -0
- pepscript/config.py +167 -0
- pepscript/exceptions.py +27 -0
- pepscript/io.py +39 -0
- pepscript/models.py +87 -0
- pepscript/parser.py +161 -0
- pepscript/py.typed +0 -0
- pepscript/script.py +201 -0
- pepscript/serialize.py +145 -0
- pepscript/validate.py +200 -0
- pepscript-0.1.0.dist-info/METADATA +137 -0
- pepscript-0.1.0.dist-info/RECORD +13 -0
- pepscript-0.1.0.dist-info/WHEEL +4 -0
pepscript/__init__.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"""Public API for PEPScript."""
|
|
2
|
+
|
|
3
|
+
from .config import ToolConfig
|
|
4
|
+
from .exceptions import (
|
|
5
|
+
DuplicateMetadataBlockError,
|
|
6
|
+
FileLoadError,
|
|
7
|
+
MetadataParseError,
|
|
8
|
+
MetadataValidationError,
|
|
9
|
+
PEPScriptError,
|
|
10
|
+
SaveError,
|
|
11
|
+
)
|
|
12
|
+
from .models import ConfigRoot, Metadata, ScriptFileInfo
|
|
13
|
+
from .script import PEPScript, parse_file, parse_script
|
|
14
|
+
|
|
15
|
+
__all__ = [
|
|
16
|
+
"ToolConfig",
|
|
17
|
+
"DuplicateMetadataBlockError",
|
|
18
|
+
"FileLoadError",
|
|
19
|
+
"MetadataParseError",
|
|
20
|
+
"MetadataValidationError",
|
|
21
|
+
"ConfigRoot",
|
|
22
|
+
"Metadata",
|
|
23
|
+
"PEPScript",
|
|
24
|
+
"PEPScriptError",
|
|
25
|
+
"SaveError",
|
|
26
|
+
"ScriptFileInfo",
|
|
27
|
+
"parse_file",
|
|
28
|
+
"parse_script",
|
|
29
|
+
]
|
pepscript/config.py
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
"""Dynamic configuration node for nested tool configuration."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import copy
|
|
6
|
+
from collections.abc import Iterable, Iterator, Mapping
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from typing import Any, cast, overload
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def _wrap_value(value: Any) -> Any:
|
|
12
|
+
if isinstance(value, ToolConfig):
|
|
13
|
+
return value
|
|
14
|
+
if isinstance(value, Mapping):
|
|
15
|
+
return ToolConfig.from_dict(dict(value))
|
|
16
|
+
if isinstance(value, list):
|
|
17
|
+
return [_wrap_value(item) for item in value]
|
|
18
|
+
return value
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _unwrap_value(value: Any) -> Any:
|
|
22
|
+
if isinstance(value, ToolConfig):
|
|
23
|
+
return value.to_dict()
|
|
24
|
+
if isinstance(value, list):
|
|
25
|
+
return [_unwrap_value(item) for item in value]
|
|
26
|
+
return value
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass(slots=True)
class ToolConfig:
    """Mapping-like node with attribute and item access.

    Stored values are normalized through ``_wrap_value`` at every entry
    point, so nested plain dicts always behave as ``ToolConfig`` nodes.
    """

    # Backing store; keys are config names, values are wrapped nodes/scalars.
    _data: dict[str, Any] = field(default_factory=dict, repr=False)

    def __post_init__(self) -> None:
        # Wrap eagerly so nested plain dicts become ToolConfig nodes no matter
        # how the instance was constructed.
        self._data = {key: _wrap_value(value) for key, value in self._data.items()}

    @classmethod
    def from_dict(cls, data: Mapping[str, Any]) -> ToolConfig:
        """Create a ``ToolConfig`` from a plain mapping, wrapping nested dicts recursively.

        Args:
            data: A mapping whose values may themselves be mappings, lists, or
                scalar values.

        Returns:
            A new ``ToolConfig`` with all nested ``Mapping`` values converted to ``ToolConfig`` instances.
        """
        return cls(_data={key: _wrap_value(value) for key, value in data.items()})

    def to_dict(self) -> dict[str, Any]:
        """Recursively unwrap this node to a plain ``dict``.

        Returns:
            A plain ``dict`` where all nested ``ToolConfig`` values are also unwrapped to ``dict``.
        """
        return {key: _unwrap_value(value) for key, value in self._data.items()}

    def get(self, key: str, default: Any = None) -> Any:
        """Return the value for *key*, or *default* if the key is absent.

        Args:
            key: The key to look up.
            default: Value returned when *key* is not present. Defaults to ``None``.

        Returns:
            The stored value (which may be a ``ToolConfig``) or *default*.
        """
        return self._data.get(key, default)

    def setdefault(self, key: str, default: Any = None) -> Any:
        """Return the value for *key*, inserting *default* if the key is absent.

        Mirrors ``dict.setdefault``. If *default* is a ``Mapping`` it is wrapped
        in a ``ToolConfig`` before insertion.

        Args:
            key: The key to look up or insert.
            default: Value to insert and return when *key* is not present.
                Defaults to ``None``.

        Returns:
            The existing value for *key*, or the newly inserted *default*.
        """
        # NOTE: the default is wrapped unconditionally, even when the key is
        # already present and the wrapped value is discarded.
        value = self._data.setdefault(key, _wrap_value(default))
        return value

    @overload
    def update(self, mapping: Mapping[str, Any], /, **kwargs: Any) -> None: ...

    @overload
    def update(self, mapping: Iterable[tuple[str, Any]], /, **kwargs: Any) -> None: ...

    @overload
    def update(self, /, **kwargs: Any) -> None: ...

    def update(
        self,
        mapping: Mapping[str, Any] | Iterable[tuple[str, Any]] = (),
        /,
        **kwargs: Any,
    ) -> None:
        """Update the node with key/value pairs, mirroring ``dict.update``.

        Accepts a ``Mapping``, an iterable of ``(key, value)`` pairs, or keyword
        arguments (or any combination). Any ``Mapping`` values are wrapped in
        ``ToolConfig`` instances.

        Args:
            mapping: A ``Mapping`` or iterable of ``(key, value)`` pairs to merge.
            **kwargs: Additional key/value pairs to merge.
        """
        items: Iterator[tuple[str, Any]]
        if isinstance(mapping, Mapping):
            items = iter(cast(Iterable[tuple[str, Any]], mapping.items()))
        else:
            items = iter(mapping)
        for key, value in items:
            self._data[key] = _wrap_value(value)
        # Keyword arguments win over the positional mapping on key collision.
        for key, value in kwargs.items():
            self._data[key] = _wrap_value(value)

    def __deepcopy__(self, memo: dict[int, Any]) -> ToolConfig:
        # Manual deepcopy: the new node is registered in ``memo`` *before* the
        # children are copied, so self-referential configs cannot recurse
        # forever.  object.__setattr__ is used because __setattr__ would try to
        # route the assignment through _data.
        new = object.__new__(type(self))
        memo[id(self)] = new
        object.__setattr__(
            new,
            "_data",
            {key: copy.deepcopy(value, memo) for key, value in self._data.items()},
        )
        return new

    def __getattr__(self, name: str) -> Any:
        # Invoked only when normal attribute lookup fails, i.e. for config
        # keys; missing keys surface as AttributeError so hasattr() works.
        try:
            return self._data[name]
        except KeyError as error:
            raise AttributeError(name) from error

    def __setattr__(self, name: str, value: Any) -> None:
        # Underscore-prefixed names (the ``_data`` slot itself) bypass the
        # config mapping; everything else becomes a wrapped config entry.
        if name.startswith("_"):
            object.__setattr__(self, name, value)
            return
        self._data[name] = _wrap_value(value)

    def __getitem__(self, key: str) -> Any:
        return self._data[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._data[key] = _wrap_value(value)

    def __delitem__(self, key: str) -> None:
        del self._data[key]

    def __contains__(self, key: object) -> bool:
        return key in self._data

    def __iter__(self) -> Iterator[str]:
        return iter(self._data)

    def __len__(self) -> int:
        return len(self._data)

    def __bool__(self) -> bool:
        # Truthiness mirrors dict truthiness: empty config is falsy.
        return bool(self._data)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.to_dict()!r})"
|
pepscript/exceptions.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"""Custom exceptions for PEPScript."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class PEPScriptError(Exception):
    """Base exception for all PEPScript errors.

    Catch this to handle any failure raised by the package.
    """


class FileLoadError(PEPScriptError):
    """Raised when a script file cannot be read from disk."""


class DuplicateMetadataBlockError(PEPScriptError):
    """Raised when multiple PEP 723 metadata blocks are present in one script."""


class MetadataParseError(PEPScriptError):
    """Raised when metadata block content cannot be parsed (malformed TOML or lines)."""


class MetadataValidationError(PEPScriptError):
    """Raised when metadata is structurally invalid."""


class SaveError(PEPScriptError):
    """Raised when a script cannot be saved."""
|
pepscript/io.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"""File IO helpers for PEPScript."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from .exceptions import FileLoadError, SaveError
|
|
8
|
+
from .models import ScriptFileInfo
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def build_file_info(path: Path, *, encoding: str) -> ScriptFileInfo:
    """Snapshot the filesystem facts PEPScript exposes for *path*.

    Args:
        path: Script path to describe.
        encoding: Encoding recorded alongside the path details.

    Returns:
        A populated ``ScriptFileInfo``. ``exists`` is sampled at call time
        and is not kept up to date afterwards.
    """
    details = {
        "path": path,
        "name": path.stem,
        "filename": path.name,
        "suffix": path.suffix,
        "exists": path.exists(),
        "encoding": encoding,
    }
    return ScriptFileInfo(**details)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def read_source(path: Path, *, encoding: str) -> str:
    """Return the text content of *path* decoded with *encoding*.

    Raises:
        FileLoadError: If the file cannot be read for any OS-level reason.
    """
    try:
        text = path.read_text(encoding=encoding)
    except OSError as error:
        raise FileLoadError(f"Failed to read file: {path}") from error
    return text
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def write_source(path: Path, source: str, *, encoding: str) -> None:
    """Persist *source* to *path*, translating OS failures into ``SaveError``.

    Raises:
        SaveError: If the file cannot be written.
    """
    try:
        path.write_text(source, encoding=encoding)
    except OSError as error:
        message = f"Failed to write file: {path}"
        raise SaveError(message) from error
|
pepscript/models.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"""Typed models for PEPScript."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from .config import ToolConfig
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass(slots=True)
class ScriptFileInfo:
    """Summary of file metadata exposed by PEPScript, sampled at load time."""

    path: Path       # full path to the script file
    name: str        # file name without suffix (``path.stem``)
    filename: str    # file name including suffix (``path.name``)
    suffix: str      # extension including the leading dot, e.g. ".py"
    exists: bool     # whether the file existed when this snapshot was built
    encoding: str    # encoding used for reading/writing the script
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass(slots=True)
class BlockInfo:
    """Absolute character offsets delimiting a PEP 723 metadata block.

    ``start``/``end`` span the whole block including both marker lines, while
    ``content_start``/``content_end`` span only the commented lines between
    the markers.
    """

    start: int          # offset of the start of the "# /// script" line
    end: int            # offset just past the "# ///" line
    content_start: int  # offset of the first content line
    content_end: int    # offset just past the last content line
    block_type: str     # block kind; set to "script" by the parser
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@dataclass(slots=True)
class ConfigRoot:
    """Root container for metadata configuration (holds the ``[tool]`` table)."""

    tool: ToolConfig = field(default_factory=ToolConfig)  # contents of [tool]
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass(slots=True)
class Metadata:
    """Typed metadata model for PEP 723 script metadata."""

    dependencies: list[str] = field(default_factory=list)
    requires_python: str | None = None
    config: ConfigRoot = field(default_factory=ConfigRoot)

    def add_dependency(self, dep: str) -> None:
        """Append *dep* unless an identical specifier string is already listed.

        Args:
            dep: A PEP 508 dependency specifier string (e.g. ``"requests>=2.0"``).
        """
        if dep in self.dependencies:
            return
        self.dependencies.append(dep)

    def remove_dependency(self, dep: str) -> None:
        """Remove *dep* from the dependency list; a miss is a silent no-op.

        Args:
            dep: The exact dependency string to remove.
        """
        try:
            self.dependencies.remove(dep)
        except ValueError:
            pass

    def set_requires_python(self, spec: str | None) -> None:
        """Assign the ``requires-python`` specifier, or clear it with ``None``.

        Args:
            spec: A PEP 440 version specifier string (e.g. ``">=3.12"``), or
                ``None`` to remove the field from the metadata block.
        """
        self.requires_python = spec

    @property
    def is_empty(self) -> bool:
        """``True`` when this metadata object holds no meaningful content.

        Used internally to decide whether to write a ``# /// script`` block
        when the source did not originally contain one.
        """
        if self.dependencies:
            return False
        if self.requires_python is not None:
            return False
        return not self.config.tool.to_dict()
|
pepscript/parser.py
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
"""PEP 723 metadata block detection and parsing."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from dataclasses import dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
import tomllib
|
|
8
|
+
from typing import Any, NoReturn, cast
|
|
9
|
+
|
|
10
|
+
from .config import ToolConfig
|
|
11
|
+
from .exceptions import DuplicateMetadataBlockError, MetadataParseError
|
|
12
|
+
from .models import BlockInfo, ConfigRoot, Metadata
|
|
13
|
+
from .validate import validate_metadata
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@dataclass(slots=True)
class ParseResult:
    """Parsed metadata and optional block offsets.

    Both fields are ``None`` when the source contains no metadata block.
    """

    meta: Metadata | None   # parsed metadata, or None when no block was found
    block: BlockInfo | None  # character offsets of the block, or None
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _is_start_marker(line: str) -> bool:
|
|
25
|
+
return line.strip() == "# /// script"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _is_end_marker(line: str) -> bool:
|
|
29
|
+
return line.strip() == "# ///"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _raise_parse_error(message: str, *, path: Path | None = None) -> NoReturn:
    """Raise ``MetadataParseError``, appending the file path when one is known."""
    suffix = "" if path is None else f" (path={path})"
    raise MetadataParseError(f"{message}{suffix}")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _find_blocks(source: str, *, path: Path | None = None) -> list[BlockInfo]:
    """Locate every ``# /// script`` … ``# ///`` block and return its offsets.

    Raises:
        MetadataParseError: Via ``_raise_parse_error`` when a start marker has
            no matching end marker before end of file.
    """
    # Record the absolute character offset of each line start so blocks can
    # be reported as character spans into ``source``.
    lines = source.splitlines(keepends=True)
    offsets: list[int] = []
    total = 0
    for line in lines:
        offsets.append(total)
        total += len(line)
    offsets.append(total)  # sentinel: one past the final line

    blocks: list[BlockInfo] = []
    index = 0
    while index < len(lines):
        line = lines[index]
        if not _is_start_marker(line):
            index += 1
            continue

        # Scan forward for the closing marker.
        # NOTE(review): this stops at the FIRST "# ///" line; PEP 723 defines
        # the block as ending at the LAST such line in the comment run —
        # confirm this simplification is intended.
        end_index = index + 1
        while end_index < len(lines) and not _is_end_marker(lines[end_index]):
            end_index += 1
        if end_index >= len(lines):
            _raise_parse_error(
                "Metadata block start marker without end marker", path=path
            )

        block = BlockInfo(
            start=offsets[index],
            end=offsets[end_index + 1],
            content_start=offsets[index + 1],
            content_end=offsets[end_index],
            block_type="script",
        )
        blocks.append(block)
        index = end_index + 1

    return blocks
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def _extract_toml_content(
|
|
77
|
+
source: str, block: BlockInfo, *, path: Path | None = None
|
|
78
|
+
) -> str:
|
|
79
|
+
raw = source[block.content_start : block.content_end]
|
|
80
|
+
lines = raw.splitlines(keepends=True)
|
|
81
|
+
content_lines: list[str] = []
|
|
82
|
+
for line_number, line in enumerate(lines, start=1):
|
|
83
|
+
if line.strip() == "":
|
|
84
|
+
_raise_parse_error(
|
|
85
|
+
f"Invalid metadata content line {line_number}: expected comment-prefixed content",
|
|
86
|
+
path=path,
|
|
87
|
+
)
|
|
88
|
+
|
|
89
|
+
trimmed = line.lstrip(" \t")
|
|
90
|
+
if not trimmed.startswith("#"):
|
|
91
|
+
_raise_parse_error(
|
|
92
|
+
f"Invalid metadata content line {line_number}: expected '#'",
|
|
93
|
+
path=path,
|
|
94
|
+
)
|
|
95
|
+
text = trimmed[1:]
|
|
96
|
+
if text.startswith(" "):
|
|
97
|
+
text = text[1:]
|
|
98
|
+
content_lines.append(text)
|
|
99
|
+
return "".join(content_lines)
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def _parse_metadata_table(
    table: dict[str, Any], *, path: Path | None = None
) -> Metadata:
    """Convert a decoded TOML table into a typed ``Metadata`` instance.

    Raises:
        MetadataParseError: Via ``_raise_parse_error`` when any field has the
            wrong TOML type.
    """
    deps: list[str] = []
    if "dependencies" in table:
        raw_deps = table["dependencies"]
        if not isinstance(raw_deps, list):
            _raise_parse_error("'dependencies' must be a list", path=path)
        for entry in cast(list[Any], raw_deps):
            if not isinstance(entry, str):
                _raise_parse_error("'dependencies' entries must be strings", path=path)
            deps.append(entry)

    python_spec: str | None = None
    if "requires-python" in table:
        raw_spec = table["requires-python"]
        if not isinstance(raw_spec, str):
            _raise_parse_error("'requires-python' must be a string", path=path)
        python_spec = cast(str, raw_spec)

    tool_config = ToolConfig()
    if "tool" in table:
        raw_tool = table["tool"]
        if not isinstance(raw_tool, dict):
            _raise_parse_error("'tool' must be a table/object", path=path)
        tool_config = ToolConfig.from_dict(cast(dict[str, Any], raw_tool))

    return Metadata(
        dependencies=deps,
        requires_python=python_spec,
        config=ConfigRoot(tool=tool_config),
    )
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def parse_source(
    source: str, *, strict: bool = True, path: Path | None = None
) -> ParseResult:
    """Locate and decode the PEP 723 metadata block in *source*.

    Args:
        source: Script source text.
        strict: When ``True``, run structural validation on the parsed metadata.
        path: Optional file path, used only to enrich error messages.

    Returns:
        A ``ParseResult``; both fields are ``None`` when no block exists.

    Raises:
        DuplicateMetadataBlockError: If more than one block is present.
        MetadataParseError: If the block content is not valid TOML.
        MetadataValidationError: If ``strict`` and validation fails.
    """
    found = _find_blocks(source, path=path)
    if len(found) > 1:
        suffix = "" if path is None else f" (path={path})"
        raise DuplicateMetadataBlockError(
            f"Multiple PEP 723 metadata blocks found{suffix}"
        )
    if not found:
        return ParseResult(meta=None, block=None)

    first = found[0]
    toml_text = _extract_toml_content(source, first, path=path)
    try:
        table = tomllib.loads(toml_text)
    except tomllib.TOMLDecodeError as error:
        _raise_parse_error(f"Invalid metadata TOML: {error}", path=path)

    metadata = _parse_metadata_table(table, path=path)
    if strict:
        validate_metadata(metadata, path=path)
    return ParseResult(meta=metadata, block=first)
|
pepscript/py.typed
ADDED
|
File without changes
|
pepscript/script.py
ADDED
|
@@ -0,0 +1,201 @@
|
|
|
1
|
+
"""Public PEPScript document API."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import copy
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from .exceptions import SaveError
|
|
9
|
+
from .io import build_file_info, read_source, write_source
|
|
10
|
+
from .models import BlockInfo, Metadata, ScriptFileInfo
|
|
11
|
+
from .parser import parse_source
|
|
12
|
+
from .serialize import rewrite_source
|
|
13
|
+
from .validate import validate_metadata
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class PEPScript:
    """A typed document wrapper for PEP 723-enabled scripts."""

    path: Path | None          # None for in-memory documents (from_source)
    encoding: str              # encoding used for all file I/O
    strict: bool               # validate metadata on every parse
    source: str                # full current source text
    file: ScriptFileInfo | None  # filesystem snapshot; None when in-memory
    meta: Metadata             # parsed (or empty) metadata model
    has_metadata: bool         # whether the source contained a block
    _block: BlockInfo | None   # offsets of the existing block, if any
    _snapshot: tuple[Metadata, BlockInfo | None, bool] | None

    def __init__(
        self, path: str | Path, *, encoding: str = "utf-8", strict: bool = True
    ) -> None:
        """Load and parse a PEP 723 script from disk.

        Args:
            path: Path to the Python script file.
            encoding: File encoding used when reading and writing. Defaults to ``"utf-8"``.
            strict: If ``True`` (default), validate metadata immediately after parsing.

        Raises:
            FileLoadError: If the file cannot be read.
            DuplicateMetadataBlockError: If more than one ``# /// script`` block is found.
            MetadataParseError: If the embedded TOML is malformed.
            MetadataValidationError: If ``strict=True`` and the metadata fails validation.
        """
        self.path = Path(path)
        self.encoding = encoding
        self.strict = strict
        self.source = ""
        self.file = None
        self.meta = Metadata()
        self.has_metadata = False
        self._block = None
        self._snapshot = None
        # reload() performs the actual read + parse.
        self.reload()

    @classmethod
    def from_source(cls, source: str, *, strict: bool = True) -> PEPScript:
        """Create an in-memory script document from source text."""

        # __new__ bypasses __init__ so no disk I/O is attempted.
        instance = cls.__new__(cls)
        instance.path = None
        instance.encoding = "utf-8"
        instance.strict = strict
        instance.source = source
        instance.file = None
        instance.meta = Metadata()
        instance.has_metadata = False
        instance._block = None
        instance._snapshot = None
        instance._parse_current_source()
        return instance

    def __enter__(self) -> PEPScript:
        """Enter edit mode, snapshotting current metadata for potential rollback.

        On a clean exit, changes are automatically persisted via ``save()``
        (file-backed scripts only; in-memory scripts retain edits in-memory).
        If an exception propagates out of the ``with`` block, all in-memory
        edits are discarded by restoring the pre-enter snapshot.

        Only ``meta``, ``has_metadata``, and the internal block offsets are
        snapshotted — the full source text is not copied — so this is efficient
        even for large files.
        """
        self._snapshot = (copy.deepcopy(self.meta), self._block, self.has_metadata)
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        tb: object | None,
    ) -> None:
        """Exit edit mode, saving or rolling back depending on whether an exception occurred.

        On clean exit, ``save()`` is called for file-backed scripts.
        On exception, in-memory state is restored from the snapshot taken at
        ``__enter__`` and the exception is re-raised.
        """
        try:
            if exc_type is None:
                if self.path is not None:
                    self.save()
            else:
                if self._snapshot is not None:
                    self.meta, self._block, self.has_metadata = self._snapshot
        finally:
            self._snapshot = None
        # Returning None lets any exception propagate.
        return None

    def _parse_current_source(self) -> None:
        # Re-derive metadata state from ``self.source``; an absent block
        # yields an empty Metadata with has_metadata False.
        parsed = parse_source(self.source, strict=self.strict, path=self.path)
        self.has_metadata = parsed.meta is not None
        self.meta = parsed.meta if parsed.meta is not None else Metadata()
        self._block = parsed.block

    def validate(self) -> None:
        """Run structural validation against the current metadata.

        This is a no-op when the script has no metadata block and ``meta``
        is empty. Validates:

        - Structural shape of the metadata (correct types for all fields)
        - Each dependency is a valid PEP 508 specifier (name, extras, version
          operators, environment markers)
        - ``requires-python`` is a valid PEP 440 version specifier

        Raises:
            MetadataValidationError: If the metadata fails structural validation.
        """
        if not self.has_metadata and self.meta.is_empty:
            return
        validate_metadata(self.meta, path=self.path)

    def reload(self) -> None:
        """Discard all in-memory edits and reload state from disk (or re-parse source).

        For file-backed scripts (``self.path`` is set), the source is re-read from
        disk, ``self.file`` is refreshed, and metadata is re-parsed. For in-memory
        scripts created via ``from_source``, ``self.source`` is re-parsed without
        any I/O.
        """
        if self.path is None:
            self._parse_current_source()
            return

        self.source = read_source(self.path, encoding=self.encoding)
        self.file = build_file_info(self.path, encoding=self.encoding)
        self._parse_current_source()

    def to_source(self) -> str:
        """Serialize current state to source text without writing to disk."""
        # Keep an existing (even now-empty) block; only suppress the block
        # entirely when none existed before and the metadata is still empty.
        meta_to_write = (
            self.meta if (self.has_metadata or not self.meta.is_empty) else None
        )
        return rewrite_source(self.source, meta=meta_to_write, block=self._block)

    def save(self) -> None:
        """Persist the current state to disk, then reload.

        The metadata block is deterministically regenerated (keys sorted,
        consistent formatting) and written back to ``self.path``. The rest of
        the source is preserved exactly. After writing, ``reload()`` is called
        so that ``self.source``, ``self.file``, and ``self.meta`` reflect the
        saved file.

        Raises:
            SaveError: If ``self.path`` is ``None`` (in-memory script) or the
                file cannot be written.
        """
        if self.path is None:
            raise SaveError("Cannot save in-memory script without a file path")
        write_source(self.path, self.to_source(), encoding=self.encoding)
        self.reload()

    def save_as(self, path: str | Path) -> None:
        """Write the current state to an arbitrary path, then reload from that path.

        After a successful write, ``self.path`` is updated to *path* and
        ``reload()`` is called so that ``self.source``, ``self.file``, and
        ``self.meta`` reflect the new file location.

        Args:
            path: Destination file path (``str`` or ``Path``).

        Raises:
            SaveError: If the file cannot be written.
        """
        target = Path(path)
        write_source(target, self.to_source(), encoding=self.encoding)
        self.path = target
        self.reload()
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def parse_script(source: str, *, strict: bool = True) -> PEPScript:
    """Build an in-memory ``PEPScript`` document from *source* text (no file I/O).

    Args:
        source: Script source text.
        strict: Validate metadata immediately after parsing.
    """
    return PEPScript.from_source(source, strict=strict)
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def parse_file(
    path: str | Path, *, encoding: str = "utf-8", strict: bool = True
) -> PEPScript:
    """Load *path* from disk as a ``PEPScript`` document.

    Args:
        path: Path to the script file.
        encoding: File encoding for reading and writing.
        strict: Validate metadata immediately after parsing.
    """
    return PEPScript(path, encoding=encoding, strict=strict)
|
pepscript/serialize.py
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
1
|
+
"""Serialization and source rewrite utilities."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections.abc import Mapping
|
|
6
|
+
import json
|
|
7
|
+
import re
|
|
8
|
+
from typing import Any, cast
|
|
9
|
+
|
|
10
|
+
from .config import ToolConfig
|
|
11
|
+
from .models import BlockInfo, Metadata
|
|
12
|
+
|
|
13
|
+
_BARE_KEY_RE = re.compile(r"^[A-Za-z0-9_-]+$")
|
|
14
|
+
_CODING_RE = re.compile(r"^[ \t]*#.*coding[:=][ \t]*[-_.a-zA-Z0-9]+")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _format_key(key: str) -> str:
|
|
18
|
+
if _BARE_KEY_RE.match(key):
|
|
19
|
+
return key
|
|
20
|
+
return json.dumps(key)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _to_plain(value: Any) -> Any:
    """Reduce ``ToolConfig`` nodes (and lists containing them) to plain values."""
    if isinstance(value, ToolConfig):
        return value.to_dict()
    return [_to_plain(item) for item in value] if isinstance(value, list) else value
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _format_value(value: Any) -> str:
|
|
32
|
+
value = _to_plain(value)
|
|
33
|
+
if isinstance(value, bool):
|
|
34
|
+
return "true" if value else "false"
|
|
35
|
+
if isinstance(value, str):
|
|
36
|
+
return json.dumps(value)
|
|
37
|
+
if isinstance(value, int):
|
|
38
|
+
return str(value)
|
|
39
|
+
if isinstance(value, float):
|
|
40
|
+
return repr(value)
|
|
41
|
+
if value is None:
|
|
42
|
+
raise TypeError("None is not a TOML scalar value")
|
|
43
|
+
if isinstance(value, list):
|
|
44
|
+
inner = ", ".join(_format_value(item) for item in value)
|
|
45
|
+
return f"[{inner}]"
|
|
46
|
+
raise TypeError(f"Unsupported TOML value type: {type(value).__name__}")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _emit_table(path: list[str], mapping: Mapping[str, Any], lines: list[str]) -> None:
    """Append a TOML ``[dotted.header]`` table for *mapping* to *lines*.

    Keys are emitted in sorted order, scalar entries first, then nested
    tables recursively, each preceded by a blank separator line.
    """
    lines.append("[" + ".".join(_format_key(part) for part in path) + "]")

    # Classify once so scalars always precede sub-tables.
    ordered = sorted(mapping)
    nested = {key for key in ordered if isinstance(_to_plain(mapping[key]), Mapping)}

    for key in ordered:
        if key in nested:
            continue
        lines.append(f"{_format_key(key)} = {_format_value(_to_plain(mapping[key]))}")

    for key in ordered:
        if key not in nested:
            continue
        if lines and lines[-1] != "":
            lines.append("")
        child = _to_plain(mapping[key])
        if isinstance(child, Mapping):
            _emit_table([*path, key], cast(Mapping[str, Any], child), lines)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def serialize_metadata_toml(meta: Metadata) -> str:
    """Serialize typed metadata into deterministic TOML text.

    Emits ``dependencies``, then ``requires-python``, then the ``[tool]``
    table (keys sorted). Returns an empty string when nothing is set.
    """
    parts: list[str] = []
    if meta.dependencies:
        parts.append(f"dependencies = {_format_value(meta.dependencies)}")
    if meta.requires_python is not None:
        parts.append(f"requires-python = {_format_value(meta.requires_python)}")

    tool_table = _to_plain(meta.config.tool)
    if isinstance(tool_table, Mapping) and tool_table:
        if parts:
            parts.append("")  # blank separator before the [tool] table
        _emit_table(["tool"], cast(Mapping[str, Any], tool_table), parts)

    return "\n".join(parts) + "\n" if parts else ""
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def render_metadata_block(meta: Metadata) -> str:
    """Wrap serialized metadata TOML in ``# /// script`` … ``# ///`` markers.

    Each content line is prefixed with ``# `` (bare ``#`` for blank lines).
    """
    body = serialize_metadata_toml(meta)
    rendered = ["# /// script\n"]
    for row in body.splitlines():
        rendered.append(f"# {row}\n" if row else "#\n")
    rendered.append("# ///\n")
    return "".join(rendered)
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def _insertion_offset(source: str) -> int:
    """Return the character offset at which a metadata block may be inserted.

    Skips a leading shebang line and, if it immediately follows, a PEP 263
    coding-declaration line, so the inserted block never displaces either.
    """
    physical = source.splitlines(keepends=True)
    if not physical:
        return 0

    offset = 0
    next_index = 0
    if physical[0].startswith("#!"):
        offset += len(physical[0])
        next_index = 1
    if next_index < len(physical) and _CODING_RE.match(physical[next_index]):
        offset += len(physical[next_index])
    return offset
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def rewrite_source(
    source: str,
    *,
    meta: Metadata | None,
    block: BlockInfo | None,
) -> str:
    """Return *source* with its metadata block replaced, removed, or inserted.

    Behavior by case:
        - existing ``block`` and ``meta``: replace the block in place
        - existing ``block``, ``meta is None``: strip the block
        - no block and ``meta``: insert a new block after shebang/coding lines
        - no block, ``meta is None``: return *source* unchanged
    """
    if block is not None:
        replacement = "" if meta is None else render_metadata_block(meta)
        return source[: block.start] + replacement + source[block.end :]

    if meta is None:
        return source

    insert_at = _insertion_offset(source)
    head, tail = source[:insert_at], source[insert_at:]
    # Keep one blank-line gap between the new block and following code.
    needs_gap = bool(tail) and not tail.startswith("\n")
    return head + render_metadata_block(meta) + ("\n" if needs_gap else "") + tail
|
pepscript/validate.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
"""PEP 508 dependency and PEP 440 version specifier validation for parsed metadata."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import re
|
|
6
|
+
from collections.abc import Mapping
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import NoReturn
|
|
9
|
+
|
|
10
|
+
from .config import ToolConfig
|
|
11
|
+
from .exceptions import MetadataValidationError
|
|
12
|
+
from .models import Metadata
|
|
13
|
+
|
|
14
|
+
# PEP 508 distribution name: starts/ends with alphanumeric, may contain ._- in between
_NAME_RE = re.compile(r"^[A-Za-z0-9]([A-Za-z0-9._-]*[A-Za-z0-9])?$")

# PEP 440 version clause: operator + version string (e.g. ">=1.0", "==1.*").
# Longer operators come first in the alternation so "===" is not consumed as "==".
_VERSION_CLAUSE_RE = re.compile(
    r"^\s*(~=|===|==|!=|<=|>=|<|>)\s*[A-Za-z0-9.*+!_-]+\s*$"
)

# Valid PEP 508 marker variable names
_VALID_MARKER_VARS = frozenset(
    {
        "os_name",
        "sys_platform",
        "platform_machine",
        "platform_python_implementation",
        "platform_release",
        "platform_system",
        "platform_version",
        "python_version",
        "python_full_version",
        "implementation_name",
        "implementation_version",
        "extra",
        # Deprecated setuptools-style dotted names still seen in the wild
        "os.name",
        "sys.platform",
        "platform.version",
        "platform.machine",
        "platform.python_implementation",
    }
)

# Boolean/containment keywords permitted in marker expressions (not variables).
_MARKER_KEYWORDS = frozenset({"and", "or", "not", "in"})

# Matches simple and dotted identifiers (e.g. python_version, os.name)
_MARKER_IDENT_RE = re.compile(r"\b([a-z_][a-z0-9_]*(?:\.[a-z_][a-z0-9_]*)*)\b")
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def _raise_validation_error(message: str, *, path: Path | None = None) -> NoReturn:
    """Raise :class:`MetadataValidationError`, appending the file path when known."""
    if path is not None:
        message = f"{message} (path={path})"
    raise MetadataValidationError(message)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _is_scalar(value: object) -> bool:
|
|
59
|
+
return isinstance(value, (str, int, float, bool))
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _validate_tool_value(
    value: object, *, path: Path | None = None, location: str = "tool"
) -> None:
    """Recursively verify that *value* holds only TOML-representable data.

    Accepted shapes: ``ToolConfig`` nodes (validated via their dict form),
    string-keyed mappings, lists, and scalars. Anything else triggers a
    validation error that names the offending ``location``.
    """
    if isinstance(value, ToolConfig):
        # Flatten the config node and validate its plain-dict form instead.
        _validate_tool_value(value.to_dict(), path=path, location=location)
    elif isinstance(value, Mapping):
        for key, child in value.items():
            if not isinstance(key, str):
                _raise_validation_error(f"{location} keys must be strings", path=path)
            _validate_tool_value(child, path=path, location=f"{location}.{key}")
    elif isinstance(value, list):
        for position, element in enumerate(value):
            _validate_tool_value(
                element, path=path, location=f"{location}[{position}]"
            )
    elif not _is_scalar(value):
        _raise_validation_error(
            f"{location} contains unsupported value type: {type(value).__name__}",
            path=path,
        )
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def _validate_marker(marker: str, *, loc: str, path: Path | None = None) -> None:
    """Check that every unquoted identifier in a marker expression is known.

    Quoted string literals are blanked out first so that only marker
    variables and boolean keywords remain visible to the identifier scan.
    """
    stripped = re.sub(r'"[^"]*"|\'[^\']*\'', "", marker)
    for found in _MARKER_IDENT_RE.finditer(stripped):
        name = found.group(1)
        if name in _MARKER_KEYWORDS or name in _VALID_MARKER_VARS:
            continue
        _raise_validation_error(
            f"{loc} has unknown marker variable {name!r}", path=path
        )
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def _validate_pep508_dependency(
    dep: str, *, path: Path | None = None, index: int = 0
) -> None:
    """Validate a single PEP 508 dependency specifier string.

    Checks, in order: non-emptiness, the environment marker (if present),
    the distribution name, optional extras, and finally either the URL form
    (``name @ url``) or the comma-separated version specifiers.

    Args:
        dep: The raw dependency string as found in metadata.
        path: Optional file path included in error messages.
        index: Position within the dependencies list, used in error text.

    Raises:
        MetadataValidationError: if any component is malformed.
    """
    loc = f"dependencies[{index}]"
    raw = dep.strip()

    if not raw:
        _raise_validation_error(f"{loc} must not be empty", path=path)

    # Split off environment marker at first semicolon
    if ";" in raw:
        req_part, marker_part = raw.split(";", 1)
        _validate_marker(marker_part.strip(), loc=loc, path=path)
    else:
        req_part = raw

    req_part = req_part.strip()
    is_url = "@" in req_part

    # For URL requirements validate only the name/extras part before the @
    name_scope = req_part[: req_part.index("@")].strip() if is_url else req_part

    # Extract package name (stops at [, version operator chars, whitespace, or end)
    name_match = re.match(r"[A-Za-z0-9][A-Za-z0-9._-]*", name_scope)
    if not name_match:
        _raise_validation_error(
            f"{loc} has an invalid package name in {dep!r}", path=path
        )

    name = name_match.group()
    if not _NAME_RE.match(name):
        _raise_validation_error(f"{loc} has invalid package name {name!r}", path=path)

    rest = name_scope[name_match.end() :].strip()

    # Optional extras: [extra1, extra2, ...]
    if rest.startswith("["):
        close = rest.find("]")
        if close == -1:
            _raise_validation_error(
                f"{loc} has unclosed extras '[' in {dep!r}", path=path
            )
        for extra in rest[1:close].split(","):
            e = extra.strip()
            if not e:
                continue
            if not _NAME_RE.match(e):
                _raise_validation_error(f"{loc} has invalid extra {e!r}", path=path)
        rest = rest[close + 1 :].strip()

    # For URL requirements, nothing should remain between name/extras and @
    if is_url and rest:
        _raise_validation_error(
            f"{loc} has unexpected content before '@' in {dep!r}", path=path
        )

    # Version specifiers (not applicable for URL requirements)
    if not is_url and rest:
        # PEP 508 allows the whole version spec to be parenthesized,
        # e.g. "name (>=1.0, <2)" -- unwrap before splitting clauses.
        if rest.startswith("(") and rest.endswith(")"):
            rest = rest[1:-1].strip()
        for clause in rest.split(","):
            if not _VERSION_CLAUSE_RE.match(clause):
                _raise_validation_error(
                    f"{loc} has invalid version specifier {clause.strip()!r} in {dep!r}",
                    path=path,
                )
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def _validate_requires_python(spec: str, *, path: Path | None = None) -> None:
    """Validate a PEP 440 ``requires-python`` version specifier string."""
    for part in spec.split(","):
        if _VERSION_CLAUSE_RE.match(part):
            continue
        _raise_validation_error(
            f"'requires-python' has invalid specifier {part.strip()!r}",
            path=path,
        )
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def validate_metadata(meta: Metadata | None, *, path: Path | None = None) -> None:
    """Validate metadata structure, PEP 508 dependency specifiers, and PEP 440 version constraints.

    A ``None`` metadata object is considered valid (nothing to check).
    Errors are reported via :class:`MetadataValidationError`, annotated with
    *path* when provided.
    """
    if meta is None:
        return

    deps = meta.dependencies
    if not isinstance(deps, list):
        _raise_validation_error("'dependencies' must be a list", path=path)
    for pos, entry in enumerate(deps):
        if isinstance(entry, str):
            _validate_pep508_dependency(entry, path=path, index=pos)
        else:
            _raise_validation_error(
                f"'dependencies[{pos}]' must be a string",
                path=path,
            )

    requires = meta.requires_python
    if requires is not None:
        if isinstance(requires, str):
            _validate_requires_python(requires, path=path)
        else:
            _raise_validation_error(
                "'requires-python' must be a string or None", path=path
            )

    _validate_tool_value(meta.config.tool, path=path)
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: pepscript
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: Typed Python library for working with PEP 723 inline script metadata
|
|
5
|
+
Keywords: pep723,metadata,inline-script,toml
|
|
6
|
+
Author: maximiliancw
|
|
7
|
+
Author-email: maximiliancw <wunderkind.serie-0f@icloud.com>
|
|
8
|
+
License-Expression: MIT
|
|
9
|
+
Classifier: Development Status :: 3 - Alpha
|
|
10
|
+
Classifier: Intended Audience :: Developers
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
14
|
+
Classifier: Typing :: Typed
|
|
15
|
+
Requires-Python: >=3.12
|
|
16
|
+
Project-URL: Repository, https://github.com/botlot-project/PEPscript
|
|
17
|
+
Description-Content-Type: text/markdown
|
|
18
|
+
|
|
19
|
+
# PEPScript
|
|
20
|
+
|
|
21
|
+
[](https://github.com/botlot-project/PEPscript/actions/workflows/ci.yml)
|
|
22
|
+
[](https://pypi.org/project/pepscript/)
|
|
23
|
+
[](https://pypi.org/project/pepscript/)
|
|
24
|
+
[](https://github.com/botlot-project/PEPscript/blob/main/LICENSE)
|
|
25
|
+
[](https://github.com/botlot-project/PEPscript)
|
|
26
|
+
[](https://botlot-project.github.io/PEPscript/)
|
|
27
|
+
|
|
28
|
+
A Python library for parsing, validating, editing, and saving [PEP 723](https://peps.python.org/pep-0723/) inline script metadata through a typed API.
|
|
29
|
+
|
|
30
|
+
- Zero runtime dependencies (stdlib only)
|
|
31
|
+
- Python 3.12+
|
|
32
|
+
- Full type hints with `py.typed`
|
|
33
|
+
|
|
34
|
+
## Installation
|
|
35
|
+
|
|
36
|
+
```bash
|
|
37
|
+
pip install pepscript
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
## Usage
|
|
41
|
+
|
|
42
|
+
### Read metadata from a script
|
|
43
|
+
|
|
44
|
+
```python
|
|
45
|
+
from pepscript import PEPScript
|
|
46
|
+
|
|
47
|
+
script = PEPScript("my_script.py")
|
|
48
|
+
if script.has_metadata:
|
|
49
|
+
print(script.meta.dependencies)
|
|
50
|
+
print(script.meta.requires_python)
|
|
51
|
+
```
|
|
52
|
+
|
|
53
|
+
### Edit and save
|
|
54
|
+
|
|
55
|
+
```python
|
|
56
|
+
from pepscript import PEPScript
|
|
57
|
+
|
|
58
|
+
with PEPScript("my_script.py") as script:
|
|
59
|
+
script.meta.add_dependency("httpx>=0.27")
|
|
60
|
+
script.meta.set_requires_python(">=3.12")
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
### Parse from a string
|
|
64
|
+
|
|
65
|
+
```python
|
|
66
|
+
from pepscript import parse_script
|
|
67
|
+
|
|
68
|
+
script = parse_script("""\
|
|
69
|
+
# /// script
|
|
70
|
+
# dependencies = ["requests>=2.0"]
|
|
71
|
+
# requires-python = ">=3.12"
|
|
72
|
+
# ///
|
|
73
|
+
print("hello")
|
|
74
|
+
""")
|
|
75
|
+
print(script.meta.dependencies) # ['requests>=2.0']
|
|
76
|
+
```
|
|
77
|
+
|
|
78
|
+
### Access tool configuration
|
|
79
|
+
|
|
80
|
+
```python
|
|
81
|
+
from pepscript import PEPScript
|
|
82
|
+
|
|
83
|
+
script = PEPScript("my_script.py")
|
|
84
|
+
if script.has_metadata:
|
|
85
|
+
# Attribute access
|
|
86
|
+
line_length = script.meta.config.tool.ruff.line_length
|
|
87
|
+
# Item access (for keys with hyphens)
|
|
88
|
+
setting = script.meta.config.tool["my-tool"]["some-setting"]
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
### Validate metadata
|
|
92
|
+
|
|
93
|
+
```python
|
|
94
|
+
from pepscript import PEPScript, MetadataValidationError
|
|
95
|
+
|
|
96
|
+
script = PEPScript("my_script.py", strict=False)
|
|
97
|
+
try:
|
|
98
|
+
script.validate()
|
|
99
|
+
except MetadataValidationError as e:
|
|
100
|
+
print(f"Invalid metadata: {e}")
|
|
101
|
+
```
|
|
102
|
+
|
|
103
|
+
## Contributing
|
|
104
|
+
|
|
105
|
+
### Setup
|
|
106
|
+
|
|
107
|
+
```bash
|
|
108
|
+
uv sync
|
|
109
|
+
```
|
|
110
|
+
|
|
111
|
+
### Running checks
|
|
112
|
+
|
|
113
|
+
```bash
|
|
114
|
+
uv run pytest # Tests
|
|
115
|
+
uv run ruff check . # Lint
|
|
116
|
+
uv run ruff format . # Format
|
|
117
|
+
uv run ty check . # Type check
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
### Versioning and releases
|
|
121
|
+
|
|
122
|
+
This project uses [Semantic Versioning](https://semver.org/). The version is set in `pyproject.toml`.
|
|
123
|
+
|
|
124
|
+
To release:
|
|
125
|
+
|
|
126
|
+
1. Update `version` in `pyproject.toml`
|
|
127
|
+
2. Tag and push:
|
|
128
|
+
```bash
|
|
129
|
+
git tag v0.2.0
|
|
130
|
+
git push --tags
|
|
131
|
+
```
|
|
132
|
+
|
|
133
|
+
Commit messages should follow [Conventional Commits](https://www.conventionalcommits.org/) (`fix:`, `feat:`, `feat!:` for breaking changes).
|
|
134
|
+
|
|
135
|
+
## License
|
|
136
|
+
|
|
137
|
+
MIT
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
pepscript/__init__.py,sha256=n4fZdoVoHITLmVGJeLltD5pkyJToivZnddiblL5ikm0,641
|
|
2
|
+
pepscript/config.py,sha256=GjBOT0bapLN2SKKT20sY7U06qMEicRBclzfIwV_H8Ss,5489
|
|
3
|
+
pepscript/exceptions.py,sha256=wnRj1en3Jk-sqpVJQQxhh6JclHQ8V7HuupGIejhcFx4,654
|
|
4
|
+
pepscript/io.py,sha256=W8LLz0Z7PZkWTGhkWx3t22tVok1D46VTOKUlGK1XUtQ,1005
|
|
5
|
+
pepscript/models.py,sha256=pWCQT0vzOM12oSgi9Un0OlMx9TW8LeBHOS5WAK0ofgU,2349
|
|
6
|
+
pepscript/parser.py,sha256=DxGFlRwof1JPU9RaB_RjEvrmNTvkqefpPqCS8wBfZwc,5023
|
|
7
|
+
pepscript/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
8
|
+
pepscript/script.py,sha256=i71cmKtecNHDdCm54vgmiCL4bdKSuthHA-90fcQfszI,7484
|
|
9
|
+
pepscript/serialize.py,sha256=XClSbYNu8vknutL4-13Rf31NVTjmv6RpCoODlVNlgx8,4221
|
|
10
|
+
pepscript/validate.py,sha256=IlPFCJeIntdaNPem4J6KGZz8WCZdRjrgfyu3ExMJryY,7028
|
|
11
|
+
pepscript-0.1.0.dist-info/WHEEL,sha256=Sb1dMJuf3wy6TqB8bzcqZpk8WSKCV8HbGz39HaP5dwE,81
|
|
12
|
+
pepscript-0.1.0.dist-info/METADATA,sha256=1Mhe3IZaP22lSBXHTZczUgx-qQOS1TG9PSmW2k5h3-c,3595
|
|
13
|
+
pepscript-0.1.0.dist-info/RECORD,,
|