ps-plugin-sdk 0.2.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,12 @@
1
+ Metadata-Version: 2.4
2
+ Name: ps-plugin-sdk
3
+ Version: 0.2.8
4
+ Summary:
5
+ Requires-Python: >=3.10,<3.14
6
+ Classifier: Programming Language :: Python :: 3
7
+ Classifier: Programming Language :: Python :: 3.10
8
+ Classifier: Programming Language :: Python :: 3.11
9
+ Classifier: Programming Language :: Python :: 3.12
10
+ Classifier: Programming Language :: Python :: 3.13
11
+ Requires-Dist: poetry (>=2.3.1,<3.0.0)
12
+ Requires-Dist: pydantic (>=2.12.5,<3.0.0)
@@ -0,0 +1,22 @@
1
+ [project]
2
+ name = "ps-plugin-sdk"
3
+ description = ""
4
+ requires-python = ">=3.10,<3.14"
5
+ version = "0.2.8"
6
+ dependencies = [
7
+ "poetry<3.0.0,>=2.3.1",
8
+ "pydantic<3.0.0,>=2.12.5",
9
+ ]
10
+
11
+ [tool.poetry.group.ps.dependencies]
12
+ ps-version = ">=0.2.8,<0.3.0"
13
+
14
+ [tool.poetry]
15
+ packages = [ { include = "ps/plugin/sdk", from = "src" } ]
16
+
17
+ [tool.ps-plugin]
18
+ host-project = ".."
19
+
20
+ [build-system]
21
+ requires = ["poetry-core>=1.0.0"]
22
+ build-backend = "poetry.core.masonry.api"
@@ -0,0 +1 @@
1
+ __all__ = []
@@ -0,0 +1,19 @@
1
+ from ._cleo_inputs import CommandOptionsProtocol, ensure_argument, ensure_option
2
+ from ._protocols import (
3
+ PoetryActivateProtocol,
4
+ PoetryCommandProtocol,
5
+ PoetryErrorProtocol,
6
+ PoetrySignalProtocol,
7
+ PoetryTerminateProtocol,
8
+ )
9
+
10
+ __all__ = [
11
+ "PoetryActivateProtocol",
12
+ "PoetryCommandProtocol",
13
+ "PoetryErrorProtocol",
14
+ "PoetrySignalProtocol",
15
+ "PoetryTerminateProtocol",
16
+ "ensure_argument",
17
+ "ensure_option",
18
+ "CommandOptionsProtocol",
19
+ ]
@@ -0,0 +1,25 @@
1
+ from typing import Protocol
2
+
3
+ from cleo.io.inputs.option import Option
4
+ from cleo.io.inputs.argument import Argument
5
+
6
+
7
class CommandOptionsProtocol(Protocol):
    """Structural type for cleo commands that expose their option/argument lists.

    Satisfied by any object carrying mutable ``options`` and ``arguments``
    lists; ``ensure_option``/``ensure_argument`` read and append to them.
    """

    options: list[Option]
    arguments: list[Argument]
10
+
11
+
12
def ensure_argument(command: CommandOptionsProtocol, argument: Argument) -> bool:
    """Add *argument* to *command* unless one with the same name is present.

    Returns True when the argument was appended, False when it already existed.
    """
    for existing in command.arguments:
        if existing.name == argument.name:
            return False
    command.arguments.append(argument)
    return True
18
+
19
+
20
def ensure_option(command: CommandOptionsProtocol, option: Option) -> bool:
    """Add *option* to *command* unless one with the same name is present.

    Returns True when the option was appended, False when it already existed.
    """
    for existing in command.options:
        if existing.name == option.name:
            return False
    command.options.append(option)
    return True
@@ -0,0 +1,32 @@
1
+ from cleo.events.console_command_event import ConsoleCommandEvent
2
+ from cleo.events.console_error_event import ConsoleErrorEvent
3
+ from cleo.events.console_signal_event import ConsoleSignalEvent
4
+ from cleo.events.console_terminate_event import ConsoleTerminateEvent
5
+ from cleo.events.event_dispatcher import EventDispatcher
6
+ from poetry.console.application import Application
7
+ from typing_extensions import Protocol, runtime_checkable
8
+
9
+
10
@runtime_checkable
class PoetryActivateProtocol(Protocol):
    """Plugin hook: receives the Poetry Application on activation.

    Returns a bool — presumably whether activation succeeded/was handled;
    TODO confirm the dispatcher's interpretation of the return value.
    """

    def poetry_activate(self, application: Application) -> bool: ...
13
+
14
+
15
@runtime_checkable
class PoetryCommandProtocol(Protocol):
    """Plugin hook: receives cleo ``ConsoleCommandEvent`` notifications."""

    def poetry_command(self, event: ConsoleCommandEvent, dispatcher: EventDispatcher) -> None: ...
18
+
19
+
20
@runtime_checkable
class PoetryErrorProtocol(Protocol):
    """Plugin hook: receives cleo ``ConsoleErrorEvent`` notifications."""

    def poetry_error(self, event: ConsoleErrorEvent, dispatcher: EventDispatcher) -> None: ...
23
+
24
+
25
@runtime_checkable
class PoetryTerminateProtocol(Protocol):
    """Plugin hook: receives cleo ``ConsoleTerminateEvent`` notifications."""

    def poetry_terminate(self, event: ConsoleTerminateEvent, dispatcher: EventDispatcher) -> None: ...
28
+
29
+
30
@runtime_checkable
class PoetrySignalProtocol(Protocol):
    """Plugin hook: receives cleo ``ConsoleSignalEvent`` notifications."""

    def poetry_signal(self, event: ConsoleSignalEvent, dispatcher: EventDispatcher) -> None: ...
@@ -0,0 +1,8 @@
1
+ from ._logging import get_module_name, get_module_verbal_name, log_debug, log_verbose
2
+
3
+ __all__ = [
4
+ "get_module_name",
5
+ "get_module_verbal_name",
6
+ "log_debug",
7
+ "log_verbose",
8
+ ]
@@ -0,0 +1,35 @@
1
+ from typing import Type, Union
2
+
3
+ from cleo.io.io import IO
4
+
5
+
6
def get_module_verbal_name(obj: Union[Type, object], include_type: bool = False) -> str:
    """Return a cleo-markup display name for *obj* (a class or an instance).

    Prefers the class's string ``name`` attribute when present; otherwise
    falls back to the dark-gray dotted ``module.ClassName`` form. With
    ``include_type`` the dotted form is appended in parentheses.
    """
    target_cls = type(obj) if not isinstance(obj, type) else obj
    dotted = f"<fg=dark_gray>{target_cls.__module__}.{target_cls.__name__}</>"
    declared = getattr(target_cls, "name", None)

    if not isinstance(declared, str):
        return dotted
    if include_type:
        return f"<fg=light_green>{declared}</> ({dotted})"
    return declared
15
+
16
+
17
def get_module_name(obj: Union[Type, object]) -> str:
    """Return a plain identifier for *obj*: its class-level ``name`` string
    when present, otherwise the dotted ``module.ClassName`` path."""
    target_cls = type(obj) if not isinstance(obj, type) else obj
    declared = getattr(target_cls, "name", None)

    if isinstance(declared, str):
        return declared

    return f"{target_cls.__module__}.{target_cls.__name__}"
26
+
27
+
28
def log_verbose(io: IO, message: str) -> None:
    """Write *message* to *io* only when verbose output is enabled."""
    if not io.is_verbose():
        return
    io.write_line(message)
31
+
32
+
33
def log_debug(io: IO, message: str) -> None:
    """Write *message* to *io* only when debug output is enabled."""
    if not io.is_debug():
        return
    io.write_line(message)
@@ -0,0 +1,5 @@
1
+ from ._name_aware_protocol import NameAwareProtocol
2
+
3
+ __all__ = [
4
+ "NameAwareProtocol",
5
+ ]
@@ -0,0 +1,7 @@
1
+ from typing import ClassVar
2
+ from typing_extensions import Protocol, runtime_checkable
3
+
4
+
5
@runtime_checkable
class NameAwareProtocol(Protocol):
    """Structural type for objects exposing a class-level ``name`` string."""

    name: ClassVar[str]
@@ -0,0 +1,28 @@
1
+ from ._models import Project, ProjectDependency, ProjectFeedSource, SourcePriority, normalize_dist_name, dist_name_variants
2
+ from ._environment import Environment
3
+ from ._projects_helper import filter_projects
4
+ from ._parsers import (
5
+ parse_name_from_document,
6
+ parse_dependencies_from_document,
7
+ parse_version_from_document,
8
+ parse_project,
9
+ parse_sources_from_document,
10
+ parse_source_dirs_from_document,
11
+ )
12
+
13
+ __all__ = [
14
+ "Project",
15
+ "ProjectDependency",
16
+ "ProjectFeedSource",
17
+ "SourcePriority",
18
+ "Environment",
19
+ "filter_projects",
20
+ "normalize_dist_name",
21
+ "dist_name_variants",
22
+ "parse_name_from_document",
23
+ "parse_dependencies_from_document",
24
+ "parse_version_from_document",
25
+ "parse_project",
26
+ "parse_sources_from_document",
27
+ "parse_source_dirs_from_document",
28
+ ]
@@ -0,0 +1,106 @@
1
+ from pathlib import Path
2
+ from typing import Iterable
3
+ import tempfile
4
+ import shutil
5
+ import hashlib
6
+
7
+ from ._models import Project
8
+ from ._parsers import parse_project
9
+
10
+
11
class Environment:
    """Registry of pyproject-based projects reachable from an entry project.

    Recursively loads the entry project, its editable local-path dependencies,
    and — via the plugin's ``host-project`` setting — the hosting project,
    all keyed by the resolved pyproject.toml path. Also manages temp-file
    backups of pyproject files so in-place edits can be rolled back.
    """

    # Project currently designated as host. Set by add_project(is_host=True);
    # when a host-project chain is followed, the deepest host seen wins.
    _host_project: Project
    # All loaded projects, keyed by resolved pyproject.toml path.
    _projects: dict[Path, Project]
    # Maps a project's pyproject path to its backup copy in the temp directory.
    _backups: dict[Path, Path]

    def __init__(self, entry_project_path: Path):
        self._projects = {}
        self._backups = {}
        # The entry project starts as host; add_project may reassign the host
        # if the entry declares a host-project in its plugin settings.
        self._entry_project = self.add_project(entry_project_path, is_host=True)

    @property
    def host_project(self) -> Project:
        # The host project (entry project, or the project its settings point to).
        return self._host_project

    @property
    def entry_project(self) -> Project:
        # The project this environment was constructed from.
        return self._entry_project

    @property
    def projects(self) -> Iterable[Project]:
        # All loaded projects, in insertion (discovery) order.
        return self._projects.values()

    def project_dependencies(self, project: Project) -> list[Project]:
        """Return *project*'s dependencies that are themselves loaded projects."""
        result: list[Project] = []
        for dep in project.dependencies:
            resolved = dep.resolved_project_path
            # Only local-path dependencies whose pyproject is already loaded count.
            if resolved and resolved in self._projects:
                result.append(self._projects[resolved])
        return result

    def sorted_projects(self, projects: Iterable[Project]) -> list[Project]:
        """Topologically sort *projects* so dependencies precede dependents.

        Only dependency edges between members of *projects* are considered.
        NOTE(review): depth-first recursion — assumes no dependency cycles and
        modest graph depth; confirm for large workspaces.
        """
        project_list = list(projects)
        project_set = {p.path for p in project_list}
        result: list[Project] = []
        visited: set[Path] = set()

        def _visit(p: Project) -> None:
            # Post-order DFS: append after all in-set dependencies are emitted.
            if p.path in visited:
                return
            visited.add(p.path)
            for dep in self.project_dependencies(p):
                if dep.path in project_set:
                    _visit(dep)
            result.append(p)

        for p in project_list:
            _visit(p)
        return result

    def add_project(self, project_path: Path, is_host: bool = False) -> Project:
        """Load the project at *project_path* (file or directory) and recurse.

        Recurses into editable local-path dependencies and into the project's
        configured host project. Raises FileNotFoundError when the pyproject
        cannot be parsed. NOTE(review): if the project is already cached, the
        ``is_host`` flag is ignored — confirm that is intended.
        """
        project_path = project_path.resolve()
        if project_path.is_dir():
            project_path = project_path / "pyproject.toml"
        if project_path in self._projects:
            project = self._projects[project_path]
        else:
            project = parse_project(project_path)
            if project is None:
                raise FileNotFoundError(f"Project file not found at path: {project_path}")
            # Register before recursing so dependency cycles terminate.
            self._projects[project_path] = project
            if is_host:
                self._host_project = project
            for dependency in project.dependencies:
                # develop is None (unspecified) or True both qualify as editable.
                if dependency.path and dependency.develop is not False:
                    self.add_project(dependency.path)
            if project.plugin_settings.host_project:
                # Follow the host-project chain; the deepest host overwrites
                # any host set earlier in this call stack.
                host_project_path = Path(project.path.parent / project.plugin_settings.host_project).resolve()
                if host_project_path.is_dir():
                    host_project_path = host_project_path / "pyproject.toml"
                self.add_project(host_project_path, is_host=True)

        return project

    def backup_projects(self, projects: Iterable[Project]) -> None:
        """Copy each project's pyproject.toml to the temp dir (once per path)."""
        for project in projects:
            pyproject_path = project.path
            if pyproject_path in self._backups:
                continue

            temp_dir = Path(tempfile.gettempdir())
            # Stable name derived from the source path, so repeated runs reuse it.
            path_hash = hashlib.sha256(str(pyproject_path).encode()).hexdigest()[:16]
            backup_path = temp_dir / f"pyproject_backup_{path_hash}.toml"
            shutil.copy2(pyproject_path, backup_path)
            self._backups[pyproject_path] = backup_path

    def restore_projects(self, projects: Iterable[Project]) -> None:
        """Restore previously backed-up pyproject files and drop the backups."""
        for project in projects:
            pyproject_path = project.path
            if pyproject_path not in self._backups:
                continue

            backup_path = self._backups[pyproject_path]
            if backup_path.exists():
                shutil.copy2(backup_path, pyproject_path)
                backup_path.unlink()
            # Forget the backup even if the file had already disappeared.
            del self._backups[pyproject_path]
@@ -0,0 +1,245 @@
1
+ import os
2
+ from enum import StrEnum
3
+ from pathlib import Path
4
+ from typing import Any, Optional
5
+ from pydantic import BaseModel, ConfigDict, Field
6
+ from tomlkit import TOMLDocument, inline_table, table as toml_table
7
+ from packaging.requirements import Requirement
8
+ from packaging.specifiers import SpecifierSet
9
+
10
+ from ps.version import Version, VersionConstraint
11
+
12
+ from ..settings._settings import PluginSettings
13
+ from ..toml._toml_value import TomlValue
14
+
15
+
16
def normalize_dist_name(name: str) -> str:
    """Normalize a distribution name to a module-style identifier
    (lowercase, with ``-`` and ``.`` collapsed to ``_``)."""
    return name.lower().translate(str.maketrans({"-": "_", ".": "_"}))
18
+
19
+
20
def dist_name_variants(name: str) -> set[str]:
    """Return common spellings of a distribution name.

    The set contains the lowercased name, its underscore form
    (``-``/``.`` replaced by ``_``) and its dash form (``_`` replaced by
    ``-``). Mixed-separator combinations beyond these are not generated.
    """
    lowered = name.lower()
    underscore_form = lowered.replace("-", "_").replace(".", "_")
    dash_form = lowered.replace("_", "-")
    return {lowered, underscore_form, dash_form}
27
+
28
+
29
class SourcePriority(StrEnum):
    """Poetry package-source priority levels (tool.poetry.source ``priority``).

    NOTE(review): ``enum.StrEnum`` requires Python 3.11+, but the package
    metadata allows ``>=3.10`` — confirm the supported-Python floor.
    """

    DEFAULT = "default"
    PRIMARY = "primary"
    SECONDARY = "secondary"
    SUPPLEMENTAL = "supplemental"
    EXPLICIT = "explicit"
35
+
36
+
37
class ProjectFeedSource(BaseModel):
    """A package source entry from ``[[tool.poetry.source]]``."""

    name: str
    url: Optional[str] = None
    priority: Optional[SourcePriority] = None
41
+
42
+
43
class ProjectDependency(BaseModel):
    """A single dependency entry of a pyproject document.

    Wraps the TOML location of the entry (``location``) so the declared
    constraint can be read and rewritten in place, alongside the metadata
    (group, extras, path/VCS/url source info) extracted by the parsers.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    # Handle to the raw TOML value; excluded from model serialization.
    location: TomlValue = Field(exclude=True)
    name: Optional[str] = None

    # Classification
    group: Optional[str] = None  # Poetry dependency group; None for the main group
    optional: Optional[bool] = None
    python: Optional[str] = None
    markers: Optional[str] = None
    extras: Optional[list[str]] = None
    source: Optional[str] = None

    # Location-based
    path: Optional[Path] = None  # local path dependency (resolved by the parser when relative)
    develop: Optional[bool] = None
    url: Optional[str] = None

    # VCS
    git: Optional[str] = None
    branch: Optional[str] = None
    tag: Optional[str] = None
    rev: Optional[str] = None

    @property
    def version(self) -> Optional[str]:
        """Declared version constraint as a string, or None if absent.

        Handles three storage formats: PEP 508 requirement strings, Poetry
        plain constraint strings, and Poetry inline-table dicts.
        """
        if not self.location.exists:
            return None

        current_value = self.location.value
        if isinstance(current_value, str):
            # Either PEP 508 string or Poetry simple string
            try:
                req = Requirement(current_value)
                # A PEP 508 string with no specifier means "any version".
                return str(req.specifier) if req.specifier else "*"
            except (ValueError, TypeError):
                # Poetry simple string format
                return current_value
        elif isinstance(current_value, dict):
            # Poetry dict format
            version = current_value.get("version")
            return str(version) if version is not None else None

        return None

    @property
    def version_constraint(self) -> Optional[SpecifierSet]:
        """The constraint as a ``SpecifierSet``, or None when missing/unparseable."""
        version_str = self.version
        if not version_str:
            return None
        try:
            # Handle Poetry wildcard (any version)
            if version_str == "*":
                return SpecifierSet("")
            # Convert Poetry caret syntax to PEP 440
            if version_str.startswith("^"):
                version_str = self._convert_caret_to_pep440(version_str[1:])
            return SpecifierSet(version_str)
        except (ValueError, TypeError):
            return None

    def _convert_caret_to_pep440(self, version_str: str) -> str:
        """Translate a Poetry caret body to a PEP 440 compatible-release spec.

        Delegates to ps.version helpers; on failure the caret string is
        restored, which the caller's SpecifierSet() then rejects (-> None).
        """
        try:
            version = Version.parse(version_str)
            assert version is not None, "Invalid version for caret syntax"
            return version.get_constraint(VersionConstraint.COMPATIBLE)
        except Exception:
            # Fallback to original if parsing fails
            return f"^{version_str}"

    def update_version(self, version_constraint: str | SpecifierSet) -> None:
        """Rewrite the dependency's constraint in the underlying TOML document.

        Dict entries with path/VCS/url sources are collapsed to a plain
        constraint string; string entries are rebuilt. No-op when the TOML
        location does not exist.
        """
        if not self.location.exists:
            return

        # Convert SpecifierSet to string
        constraint_str = str(version_constraint) if isinstance(version_constraint, SpecifierSet) else version_constraint

        current_value = self.location.value

        if isinstance(current_value, str):
            # PEP 621/508 format: reconstruct the requirement string
            self._update_pep508_version(constraint_str)
        elif isinstance(current_value, dict):
            # Poetry dict format
            # Switch path/VCS/url dependencies to plain constraint
            # to avoid invalid mixed-table definitions.
            is_source_dependency = any(key in current_value for key in ("path", "git", "url"))
            if is_source_dependency:
                self.location.set(constraint_str)
            else:
                current_value["version"] = constraint_str
                self.location.set(current_value)
        else:
            # Poetry simple string format: replace entire value
            self.location.set(constraint_str)

    def _update_pep508_version(self, version_constraint: str) -> None:
        """Rebuild a PEP 508 requirement string around *version_constraint*,
        preserving the name, this model's extras, and the original marker."""
        try:
            current = self.location.value
            if not isinstance(current, str):
                return

            req = Requirement(current)

            # Reconstruct with new version constraint
            parts = [req.name]
            if self.extras:
                parts.append(f"[{','.join(self.extras)}]")
            parts.append(version_constraint)
            if req.marker:
                parts.append(f"; {req.marker}")

            self.location.set("".join(parts))
        except (ValueError, TypeError):
            # Fallback: just set the version string
            self.location.set(version_constraint)

    @property
    def resolved_project_path(self) -> Optional[Path]:
        """Absolute pyproject.toml path for a local path dependency, else None."""
        if self.path is None:
            return None
        if self.path.suffix == ".toml":
            return self.path.resolve()
        return (self.path / "pyproject.toml").resolve()
168
+
169
+
170
class Project(BaseModel):
    """A parsed pyproject.toml plus handles for editing it in place."""

    model_config = ConfigDict(arbitrary_types_allowed=True)

    name: TomlValue
    version: TomlValue
    path: Path  # path to the pyproject.toml file itself
    document: TOMLDocument = Field(exclude=True)  # live tomlkit document backing the TomlValues
    dependencies: list[ProjectDependency]
    sources: list[ProjectFeedSource]
    source_dirs: list[Path]
    plugin_settings: PluginSettings

    def save(self) -> None:
        """Write the (possibly modified) TOML document back to disk."""
        with open(self.path, "w") as f:
            f.write(self.document.as_string())

    def add_dependency(
        self,
        name: str,
        constraint: Optional[str] = None,
        group: Optional[str] = None,
    ) -> "ProjectDependency":
        """Add a plain constraint dependency (``"*"`` when no constraint given)."""
        entry: Any = constraint if constraint is not None else "*"
        return self._write_dependency(name, entry, resolved_path=None, develop=None, group=group)

    def add_development_dependency(
        self,
        name: str,
        path: Path,
        group: Optional[str] = None,
    ) -> "ProjectDependency":
        """Add an editable local-path dependency pointing at *path*."""
        project_dir = self.path.parent
        entry: Any = inline_table()
        # Store the path relative to this project, with forward slashes for TOML portability.
        rel = Path(os.path.relpath(path, project_dir))
        entry.append("path", str(rel).replace("\\", "/"))
        entry.append("develop", True)
        return self._write_dependency(name, entry, resolved_path=path.resolve(), develop=True, group=group)

    def _write_dependency(
        self,
        name: str,
        entry: Any,
        resolved_path: Optional[Path],
        develop: Optional[bool],
        group: Optional[str],
    ) -> "ProjectDependency":
        """Insert *entry* into the proper Poetry dependencies table and
        register a matching ProjectDependency on this model."""
        if group is None:
            dep_section = "tool.poetry.dependencies"
            deps_table = self._get_or_create_table("tool", "poetry", "dependencies")
        else:
            dep_section = f"tool.poetry.group.{group}.dependencies"
            group_table = self._get_or_create_table("tool", "poetry", "group", group)
            if "dependencies" not in group_table:
                group_table["dependencies"] = toml_table()
            deps_table = group_table["dependencies"]

        deps_table[name] = entry
        # NOTE(review): dotted lookup assumes *name* contains no "." — dist
        # names normally don't, but verify for exotic names.
        location = TomlValue.locate(self.document, [f"{dep_section}.{name}"])

        dep = ProjectDependency(
            location=location,
            name=name,
            group=group,
            path=resolved_path,
            develop=develop,
        )
        self.dependencies.append(dep)
        return dep

    def _get_or_create_table(self, *keys: str) -> Any:
        """Walk *keys* through the document, creating empty tables as needed."""
        current: Any = self.document
        for key in keys:
            if key not in current:
                current[key] = toml_table()
            current = current[key]
        return current
1
+ from pathlib import Path
2
+ from typing import Any, Optional
3
+ from tomlkit import TOMLDocument, parse
4
+ from packaging.requirements import Requirement
5
+
6
+ from ._models import Project, ProjectDependency, ProjectFeedSource, SourcePriority
7
+ from ..toml._toml_value import TomlValue
8
+ from ..settings._parsers import parse_plugin_settings_from_document
9
+
10
+
11
def parse_name_from_document(document: TOMLDocument) -> TomlValue:
    """Locate the project name, preferring PEP 621 over the Poetry table."""
    candidates = ["project.name", "tool.poetry.name"]
    return TomlValue.locate(document, candidates)
13
+
14
+
15
def parse_version_from_document(document: TOMLDocument) -> TomlValue:
    """Locate the project version, preferring PEP 621 over the Poetry table."""
    candidates = ["project.version", "tool.poetry.version"]
    return TomlValue.locate(document, candidates)
17
+
18
+
19
def parse_dependency(document: TOMLDocument, dep_path: str, name: str, value: Any, group: Optional[str], project_path: Optional[Path] = None) -> Optional[ProjectDependency]:
    """Build a ProjectDependency from one Poetry-style entry.

    *value* may be a plain constraint string or an inline-table dict; any
    other type yields None. Relative ``path`` entries are resolved against
    *project_path*'s directory when given.
    """
    location = TomlValue.locate(document, [dep_path])
    if isinstance(value, str):
        # Plain constraint string: only the name/group are known here.
        return ProjectDependency(
            location=location,
            name=name,
            group=group,
        )
    if isinstance(value, dict):
        local_path = None
        if value.get("path"):
            local_path = Path(value["path"])
            if project_path and not local_path.is_absolute():
                local_path = (project_path.parent / local_path).resolve()
        return ProjectDependency(
            location=location,
            name=name,
            group=group,
            optional=value.get("optional"),
            python=value.get("python"),
            markers=value.get("markers"),
            extras=list(value.get("extras", [])) or None,
            source=value.get("source"),
            path=local_path,
            develop=value.get("develop"),
            url=value.get("url"),
            git=value.get("git"),
            branch=value.get("branch"),
            tag=value.get("tag"),
            rev=value.get("rev"),
        )

    return None
52
+
53
+
54
def parse_dependencies_from_document(document: TOMLDocument, project_path: Optional[Path] = None) -> list[ProjectDependency]:
    """Collect all dependencies declared in *document*.

    Reads, in order: PEP 621 ``project.dependencies`` (PEP 508 strings),
    Poetry main ``tool.poetry.dependencies`` (skipping the ``python``
    constraint), and every ``tool.poetry.group.<name>.dependencies`` table.
    *project_path* anchors relative local-path dependencies.
    """
    dependencies = []

    # Support PEP 621 format (array of PEP 508 strings)
    pep621_deps = TomlValue.locate(document, ["project.dependencies"]).value
    if pep621_deps and isinstance(pep621_deps, list):
        for idx, dep_string in enumerate(pep621_deps):
            if not isinstance(dep_string, str):
                continue

            try:
                req = Requirement(dep_string)
                # Address the array element by index so it can be rewritten in place.
                dep_path = f"project.dependencies[{idx}]"
                location = TomlValue.locate(document, [dep_path])

                dependencies.append(ProjectDependency(
                    location=location,
                    name=req.name,
                    extras=list(req.extras) if req.extras else None,
                    markers=str(req.marker) if req.marker else None,
                    group=None,
                ))
            except (ValueError, TypeError):
                # Skip invalid dependency specifications
                continue

    # Support Poetry format (dictionary)
    main_deps = TomlValue.locate(document, ["tool.poetry.dependencies"]).value or {}
    for name, value in main_deps.items():
        if name == "python":
            continue
        dep_path = f"tool.poetry.dependencies.{name}"
        dep = parse_dependency(document, dep_path, name, value, group=None, project_path=project_path)
        if dep:
            dependencies.append(dep)

    groups = TomlValue.locate(document, ["tool.poetry.group"]).value or {}
    for group_name, group_data in groups.items():
        for name, value in group_data.get("dependencies", {}).items():
            dep_path = f"tool.poetry.group.{group_name}.dependencies.{name}"
            dep = parse_dependency(document, dep_path, name, value, group=group_name, project_path=project_path)
            if dep:
                dependencies.append(dep)

    return dependencies
99
+
100
+
101
def parse_sources_from_document(document: TOMLDocument) -> list[ProjectFeedSource]:
    """Parse ``[[tool.poetry.source]]`` entries into ProjectFeedSource records.

    Entries without a ``name`` key are skipped; missing or unrecognized
    ``priority`` values yield ``priority=None`` rather than raising.
    """
    sources_value = TomlValue.locate(document, ["tool.poetry.source"]).value
    if not sources_value or not isinstance(sources_value, list):
        return []

    result = []
    for entry in sources_value:
        if not isinstance(entry, dict) or "name" not in entry:
            continue
        priority_raw = entry.get("priority")
        # Use the enum's public value lookup instead of poking the private
        # _value2member_map_ attribute; invalid/missing values map to None.
        try:
            priority = SourcePriority(priority_raw)
        except ValueError:
            priority = None
        result.append(ProjectFeedSource(
            name=entry["name"],
            url=entry.get("url"),
            priority=priority,
        ))
    return result
118
+
119
+
120
def parse_source_dirs_from_document(document: TOMLDocument, project_path: Path) -> list[Path]:
    """Resolve the package source directories from ``tool.poetry.packages``.

    Each ``{ include = ..., from = ... }`` entry becomes an absolute directory
    path; when no usable entries exist, the project directory itself is
    returned as the sole source dir.
    """
    project_dir = project_path.parent
    dirs: list[Path] = []
    packages = TomlValue.locate(document, ["tool.poetry.packages"]).value or []
    for entry in packages:
        if not isinstance(entry, dict):
            continue
        include = entry.get("include")
        if not include:
            continue
        # "from" rebases the include path; otherwise it is relative to the project dir.
        base = project_dir / entry["from"] if entry.get("from") else project_dir
        dirs.append((base / include).resolve())
    return dirs or [project_dir]
133
+
134
+
135
def parse_project(project_path: Path) -> Optional[Project]:
    """Load and parse a pyproject.toml into a Project, or None when missing.

    *project_path* may be the pyproject.toml file itself or a directory
    containing one.
    """
    project_path = project_path.resolve()
    if project_path.is_dir():
        project_path = project_path / "pyproject.toml"

    if not project_path.exists():
        return None

    with project_path.open('r', encoding='utf-8') as f:
        data = parse(f.read())
    # All sub-parsers share the same live tomlkit document so TomlValue
    # handles can later rewrite it in place.
    return Project(
        name=parse_name_from_document(data),
        version=parse_version_from_document(data),
        path=project_path,
        document=data,
        dependencies=parse_dependencies_from_document(data, project_path),
        sources=parse_sources_from_document(data),
        source_dirs=parse_source_dirs_from_document(data, project_path),
        plugin_settings=parse_plugin_settings_from_document(data),
    )
@@ -0,0 +1,31 @@
1
+ from pathlib import Path
2
+ from typing import Iterable, Optional
3
+
4
+ from ._models import Project
5
+
6
+
7
def filter_projects(inputs: list[str], projects: Iterable[Project]) -> list[Project]:
    """Select projects matching any of *inputs* (project name or filesystem path).

    With no inputs, all (de-duplicated by path) projects are returned. Each
    input selects at most one project: candidates are tried longest-path-first
    so the most specific (deepest) project wins for nested layouts.
    """

    # Renamed from `input` to avoid shadowing the builtin.
    def _matches_reference(reference: str, project_path: Path, project_name: Optional[str]) -> bool:
        # A reference matches either the declared project name or any path
        # located inside the project's directory.
        if reference == project_name:
            return True
        try:
            return Path(reference).resolve().is_relative_to(project_path.parent)
        except ValueError:
            return False

    unique_projects: dict[Path, Project] = {}
    for project in projects:
        unique_projects[project.path] = project

    if not inputs:
        selected_projects = unique_projects
    else:
        selected_projects = dict[Path, Project]()
        # Longest path first: a nested project beats its parent for path refs.
        sorted_projects = sorted(unique_projects.values(), key=lambda item: len(str(item.path)), reverse=True)
        for ref in inputs:
            for project in sorted_projects:
                if _matches_reference(ref, project.path, project.name.value):
                    selected_projects[project.path] = project
                    break

    return list(selected_projects.values())
@@ -0,0 +1,7 @@
1
+ from ._settings import PluginSettings
2
+ from ._parsers import parse_plugin_settings_from_document
3
+
4
+ __all__ = [
5
+ "PluginSettings",
6
+ "parse_plugin_settings_from_document",
7
+ ]
@@ -0,0 +1,15 @@
1
+ from tomlkit import TOMLDocument
2
+
3
+ from ..toml._toml_value import TomlValue
4
+ from ._settings import PluginSettings
5
+
6
+
7
+ def parse_plugin_settings_from_document(document: TOMLDocument) -> PluginSettings:
8
+ project_toml = document
9
+ settings_section = TomlValue.locate(project_toml, [f"tool.{PluginSettings.NAME}"]).value
10
+ if settings_section is None:
11
+ return PluginSettings(enabled=False)
12
+ result = PluginSettings.model_validate(settings_section, by_alias=True)
13
+ if result.enabled is None:
14
+ result.enabled = True
15
+ return result
@@ -0,0 +1,16 @@
1
+ from pathlib import Path
2
+ from typing import ClassVar, Optional
3
+ from pydantic import BaseModel, ConfigDict, Field
4
+
5
+
6
+ class PluginSettings(BaseModel):
7
+ NAME: ClassVar[str] = "ps-plugin"
8
+
9
+ enabled: Optional[bool] = Field(default=None, exclude=True)
10
+ host_project: Optional[Path] = Field(default=None, alias="host-project")
11
+ modules: Optional[list[str]] = Field(default=None, alias="modules")
12
+
13
+ model_config = ConfigDict(
14
+ extra="allow",
15
+ populate_by_name=True,
16
+ )
@@ -0,0 +1,5 @@
1
+ from ._toml_value import TomlValue
2
+
3
+ __all__ = [
4
+ "TomlValue",
5
+ ]
@@ -0,0 +1,139 @@
1
+ import re
2
+ from typing import Any, List, Optional, Tuple
3
+
4
+ from pydantic import BaseModel, ConfigDict, Field, computed_field
5
+ from tomlkit import TOMLDocument
6
+ from tomlkit.items import AoT, Table
7
+
8
+
9
+ Token = Tuple[str, Any] # ("key", str) or ("index", int)
10
+
11
+
12
+ def _split_dotted(dotted: str) -> List[str]:
13
+ return re.findall(r'"[^"]*"|[^.]+', dotted)
14
+
15
+
16
+ def _parse_jsonpath(path: str) -> List[Token]:
17
+ if not path or path[0] != "$":
18
+ raise ValueError('JSONPath must start with "$"')
19
+
20
+ i, tokens = 1, []
21
+
22
+ while i < len(path):
23
+ if path[i] == ".":
24
+ i += 1
25
+ if i >= len(path):
26
+ raise ValueError(f"Unexpected end after '.' in JSONPath: {path!r}")
27
+ if path[i] == '"':
28
+ i += 1
29
+ start = i
30
+ while i < len(path) and path[i] != '"':
31
+ i += 1
32
+ if i >= len(path):
33
+ raise ValueError(f"Unterminated quoted key in JSONPath: {path!r}")
34
+ tokens.append(("key", path[start:i]))
35
+ i += 1
36
+ else:
37
+ if not (path[i].isalpha() or path[i] == "_"):
38
+ raise ValueError(f"Invalid key start at position {i} in JSONPath: {path!r}")
39
+ start = i
40
+ while i < len(path) and (path[i].isalnum() or path[i] in "_-"):
41
+ i += 1
42
+ tokens.append(("key", path[start:i]))
43
+ elif path[i] == "[":
44
+ i += 1
45
+ if i >= len(path) or not path[i].isdigit():
46
+ raise ValueError(f"Expected digit after '[' at position {i} in JSONPath: {path!r}")
47
+ start = i
48
+ while i < len(path) and path[i].isdigit():
49
+ i += 1
50
+ if i >= len(path) or path[i] != "]":
51
+ raise ValueError(f"Expected ']' after index at position {i} in JSONPath: {path!r}")
52
+ tokens.append(("index", int(path[start:i])))
53
+ i += 1
54
+ else:
55
+ raise ValueError(f"Unexpected character {path[i]!r} at position {i} in JSONPath: {path!r}")
56
+
57
+ return tokens
58
+
59
+
60
+ def _get_by_jsonpath(document: Any, jsonpath: str) -> Any:
61
+ cur = document
62
+ for _, val in _parse_jsonpath(jsonpath):
63
+ cur = cur[val]
64
+ return cur
65
+
66
+
67
+ def _set_by_jsonpath(document: Any, jsonpath: str, new_value: Any) -> None:
68
+ tokens = _parse_jsonpath(jsonpath)
69
+ if not tokens:
70
+ raise ValueError("Cannot set root '$'")
71
+
72
+ cur = document
73
+ for _, val in tokens[:-1]:
74
+ cur = cur[val]
75
+
76
+ cur[tokens[-1][1]] = new_value
77
+
78
+
79
def _find_first_existing_jsonpath(document: Any, dotted: str) -> Optional[str]:
    """Translate a dotted key path into a concrete JSONPath existing in *document*.

    Descends tables/dicts by key; for arrays-of-tables it tries each element
    in order and returns the first branch where the remaining keys resolve.
    Returns None when no such path exists.
    """
    def rec(node: Any, parts: List[str], path: str) -> Optional[str]:
        if not parts:
            return path
        head, *tail = parts
        # Strip the quotes that _split_dotted kept around quoted segments.
        lookup_key = head[1:-1] if head.startswith('"') and head.endswith('"') else head
        # Re-quote the emitted JSONPath segment when it contains characters
        # a bare key cannot carry.
        needs_quote = not all(c.isalnum() or c in "_-" for c in lookup_key)
        jp_segment = f'."{lookup_key}"' if needs_quote else f".{lookup_key}"
        if isinstance(node, (TOMLDocument, Table, dict)):
            return rec(node[lookup_key], tail, f"{path}{jp_segment}") if lookup_key in node else None
        if isinstance(node, (AoT, list)):
            for i, item in enumerate(node):
                # Note: the key is matched inside the element, so `parts`
                # (not `tail`) is passed down unchanged.
                if result := rec(item, parts, f"{path}[{i}]"):
                    return result
        return None

    return rec(document, _split_dotted(dotted), "$")
96
+
97
+
98
class TomlValue(BaseModel):
    """A handle to a single value inside a tomlkit document, addressed by JSONPath.

    ``jsonpath`` of None means the value was never located and cannot be set.
    Reads are forgiving (missing paths yield ``exists=False`` / ``value=None``);
    writes through ``set`` mutate the shared live document.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True)

    document: TOMLDocument = Field(exclude=True)
    jsonpath: Optional[str] = None

    @property
    def exists(self) -> bool:
        """True when the path currently resolves inside the document."""
        if not self.jsonpath:
            return False
        try:
            _get_by_jsonpath(self.document, self.jsonpath)
            return True
        except Exception:
            # Resolution may fail with KeyError, IndexError or TypeError
            # depending on the container types along the path; any failure
            # means "absent". (Replaces the redundant
            # `except (KeyError, IndexError, Exception)` tuple — Exception
            # already subsumed the other two.)
            return False

    @computed_field
    @property
    def value(self) -> Optional[Any]:
        """The referenced value, or None when the path does not resolve."""
        if not self.jsonpath:
            return None
        try:
            return _get_by_jsonpath(self.document, self.jsonpath)
        except Exception:
            # Same rationale as in `exists`: any resolution failure -> None.
            return None

    def set(self, new_value: Any) -> None:
        """Write *new_value* at this path.

        Raises ValueError when the value was never located; intermediate
        containers along the path must already exist.
        """
        if not self.jsonpath:
            raise ValueError("Path not found")
        _set_by_jsonpath(self.document, self.jsonpath, new_value)

    @staticmethod
    def locate(document: TOMLDocument, dotted_candidates: List[str]) -> 'TomlValue':
        """Return a TomlValue for the first dotted candidate existing in *document*.

        When none exist, falls back to a JSONPath built from the first
        candidate so a later ``set`` targets the conventional location;
        with no candidates at all, the handle is unlocated (jsonpath=None).
        """
        for dotted in dotted_candidates:
            jp = _find_first_existing_jsonpath(document, dotted)
            if jp:
                return TomlValue(document=document, jsonpath=jp)
        # If no existing path found, use first candidate as fallback
        if dotted_candidates:
            fallback_path = f"$.{dotted_candidates[0]}"
            return TomlValue(document=document, jsonpath=fallback_path)
        return TomlValue(document=document, jsonpath=None)