openrewrite-remote 0.13.2__py3-none-any.whl → 0.13.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {openrewrite_remote-0.13.2.dist-info → openrewrite_remote-0.13.4.dist-info}/METADATA +1 -1
- openrewrite_remote-0.13.4.dist-info/RECORD +23 -0
- rewrite_remote/handlers/__init__.py +1 -0
- rewrite_remote/handlers/handler_helpers.py +16 -0
- rewrite_remote/handlers/hello_world_handler.py +40 -0
- rewrite_remote/handlers/list_projects_handler.py +66 -0
- rewrite_remote/handlers/project_helper.py +196 -0
- rewrite_remote/handlers/pypi_manager.py +300 -0
- rewrite_remote/handlers/recipe_install_handler.py +152 -0
- rewrite_remote/handlers/run_recipe_load_and_visitor_handler.py +135 -0
- rewrite_remote/handlers/types.py +30 -0
- rewrite_remote/receiver.py +41 -23
- rewrite_remote/remoting.py +5 -12
- rewrite_remote/sender.py +92 -103
- rewrite_remote/server.py +1 -2
- openrewrite_remote-0.13.2.dist-info/RECORD +0 -14
- {openrewrite_remote-0.13.2.dist-info → openrewrite_remote-0.13.4.dist-info}/WHEEL +0 -0
- {openrewrite_remote-0.13.2.dist-info → openrewrite_remote-0.13.4.dist-info}/entry_points.txt +0 -0
- {openrewrite_remote-0.13.2.dist-info → openrewrite_remote-0.13.4.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,23 @@
|
|
1
|
+
rewrite_remote/__init__.py,sha256=uuLrPH--ewvE-5owXbNItXDfjCypMXQgsm-72hO_dtc,286
|
2
|
+
rewrite_remote/client.py,sha256=95ZCAtVOngF0ZqqKnOsrweUeGKruf3UKGPXNGTrNyy0,1853
|
3
|
+
rewrite_remote/event.py,sha256=texLJD1mcFkpBpiXAa-Rmip0Tgqm2OlBpRPHFZyWcBs,359
|
4
|
+
rewrite_remote/receiver.py,sha256=9oIdupOcV9z6eY-c2uFL1yfujbTQTgbXgTPG9Qx_ipc,20008
|
5
|
+
rewrite_remote/remote_utils.py,sha256=wUo9WZoldgCLihFJGf6RaE1SufhDiEPCFlX74tcODVM,10552
|
6
|
+
rewrite_remote/remoting.py,sha256=s0qVJlKTKhLZw8-9CJB1dJ4sPeKVkwIldI2WYEGULHY,13365
|
7
|
+
rewrite_remote/sender.py,sha256=z42hmAIXai_dazQ-DhMwv_fDSi0AA2orUT2juWX2UJg,20056
|
8
|
+
rewrite_remote/server.py,sha256=LDk4mUs33DX1s8HMCh3rC5lkpN8luyDu1CLeDQXczlY,9343
|
9
|
+
rewrite_remote/type_utils.py,sha256=oVrB0olWFSCqhmg2nTU2wrwiAU7kBCUscjwdHK7gf3Y,4219
|
10
|
+
rewrite_remote/handlers/__init__.py,sha256=ED6jHcYiuYpr_0vjGz0zx2lrrmJT9sDJCzIljoDfmlM,65
|
11
|
+
rewrite_remote/handlers/handler_helpers.py,sha256=CIAXtlzrsE-z5RmzlXBZihT1rfrRLHFttq8Ni_8AH9U,516
|
12
|
+
rewrite_remote/handlers/hello_world_handler.py,sha256=NQScHfCJKofdImBgELL8tXtG_KnVyXj683C3Ae7xcKU,1298
|
13
|
+
rewrite_remote/handlers/list_projects_handler.py,sha256=ukkCST7whpCxQ0omCWWw9W27Su3rFSnnEIhAS6T4iOU,2007
|
14
|
+
rewrite_remote/handlers/project_helper.py,sha256=_zGzniyEgmBB4k14Oy4Sm7B3Lt1MoXKQLOJxofEELGI,7130
|
15
|
+
rewrite_remote/handlers/pypi_manager.py,sha256=z4JJarLkNRztQaSLHCh5GViag7ce7E36ARBDqJLQREc,10269
|
16
|
+
rewrite_remote/handlers/recipe_install_handler.py,sha256=D6n5d84pS-HBWNiND-qZt-SfB33uFHV0ntZf2ZQ1Imo,4942
|
17
|
+
rewrite_remote/handlers/run_recipe_load_and_visitor_handler.py,sha256=Pppn90g2z0z3U9m4_3a3X9Wu0Qh_pvkhoxWgadjAHRs,4146
|
18
|
+
rewrite_remote/handlers/types.py,sha256=5TK_oFp-7iy1iACWisuQJsa_WHTsqcPrV9nQTZ-_xgo,518
|
19
|
+
openrewrite_remote-0.13.4.dist-info/METADATA,sha256=jL-ze_dwxBxu6JQADFZO2s_QJLF7IxPJwEQdU1pTLaI,386
|
20
|
+
openrewrite_remote-0.13.4.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
|
21
|
+
openrewrite_remote-0.13.4.dist-info/entry_points.txt,sha256=SMukuF7TPjQr3IZIcH8f98-_QBCqYSbYXYrVv-5UzRI,69
|
22
|
+
openrewrite_remote-0.13.4.dist-info/top_level.txt,sha256=ansTioSZ-62aH3F2L3d1Bua0pJF4GOtgQ1PpG-CzcP0,15
|
23
|
+
openrewrite_remote-0.13.4.dist-info/RECORD,,
|
@@ -0,0 +1 @@
|
|
1
|
+
# Make this a pkgutil-style namespace package: extend_path merges any other
# copies of this package found on sys.path into a single importable package.
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
@@ -0,0 +1,16 @@
|
|
1
|
+
import logging
|
2
|
+
import socket
|
3
|
+
import cbor2
|
4
|
+
|
5
|
+
from rewrite_remote.remote_utils import COMMAND_END
|
6
|
+
from rewrite_remote import RemotingMessageType, OK, ERROR
|
7
|
+
|
8
|
+
|
9
|
+
def respond_with_error(message: str, sock: socket.socket) -> None:
    """Log *message* and send a CBOR-encoded ERROR response over *sock*."""
    logging.error(f"[Server] Error: {message}")
    # Response layout: message-type marker, ERROR status, payload, terminator.
    frame = b"".join(
        [
            cbor2.dumps(RemotingMessageType.Response),
            cbor2.dumps(ERROR),
            cbor2.dumps(message),
            COMMAND_END,
        ]
    )
    sock.sendall(frame)
|
@@ -0,0 +1,40 @@
|
|
1
|
+
from io import BytesIO
|
2
|
+
import traceback
|
3
|
+
import socket
|
4
|
+
|
5
|
+
import cbor2
|
6
|
+
|
7
|
+
from rewrite_remote import RemotingContext, RemotingMessageType, OK, ERROR
|
8
|
+
|
9
|
+
|
10
|
+
def hello_world_handler(
    stream: BytesIO, sock: socket.socket, remoting_ctx: RemotingContext
) -> None:
    """
    Respond to the "hello" command: expects the CBOR-encoded string "hello"
    on *stream* and replies with an OK response carrying the string "world".

    On failure (unexpected request value, socket error, CBOR decode error)
    an ERROR response containing the traceback is sent instead.
    """
    try:
        remoting_ctx.reset()
        request = cbor2.load(stream)

        # Fixed: the original printed "Did not receive 'hello'" on the
        # SUCCESS branch (request == "hello"), which was misleading.
        if request != "hello":
            raise ValueError(f"Unexpected request: {request}")

        # Prepare a response: message type, OK status, payload.
        response_stream = BytesIO()
        cbor2.dump(RemotingMessageType.Response, response_stream)
        cbor2.dump(OK, response_stream)
        cbor2.dump("world", response_stream)
        sock.sendall(response_stream.getvalue())

    # ValueError added to the catch list: previously the handler's own
    # rejection of a bad request escaped uncaught, so the ERROR response
    # promised below was never sent.
    except (ValueError, socket.error, cbor2.CBORDecodeError) as e:
        print(f"Error in hello handler: {e}")
        traceback.print_exc()
        # Send an error response if something goes wrong
        response_stream = BytesIO()
        cbor2.dump(RemotingMessageType.Response, response_stream)
        cbor2.dump(ERROR, response_stream)
        cbor2.dump(traceback.format_exc(), response_stream)
        sock.sendall(response_stream.getvalue())
|
@@ -0,0 +1,66 @@
|
|
1
|
+
import logging
|
2
|
+
import socket
|
3
|
+
from io import BytesIO
|
4
|
+
from typing import List
|
5
|
+
from cbor2 import dumps, CBORDecoder
|
6
|
+
|
7
|
+
from rewrite_remote.handlers.project_helper import list_projects
|
8
|
+
from rewrite_remote.remote_utils import COMMAND_END
|
9
|
+
from rewrite_remote.remoting import OK, RemotingContext, RemotingMessageType
|
10
|
+
from rewrite_remote.handlers.handler_helpers import respond_with_error
|
11
|
+
|
12
|
+
|
13
|
+
# Main command handler with the specified signature
def list_projects_handler(
    stream: BytesIO, sock: socket.socket, remoting_ctx: RemotingContext
) -> None:
    """
    Handle a list-projects request: decode the root pyproject.toml path from
    *stream*, discover sub-projects under it, and reply with their root
    directories as a CBOR-encoded list of strings.
    """
    remoting_ctx.reset()

    # 1. Read input from stream
    try:
        data = stream.read()
        decoder = CBORDecoder(BytesIO(data))
        root_project_file = str(decoder.decode())
    except Exception as e:  # pylint: disable=broad-except
        respond_with_error(f"Failed to decode arguments: {e}", sock)
        return

    if root_project_file == "" or root_project_file is None:
        respond_with_error("root_project_file is required", sock)
        return

    # 2. Log the request
    # Fixed: the message previously said "install-recipe", a copy-paste
    # error from the recipe-install handler.
    logging.info(
        f"""[Server] Handling list-projects request: {{
        root_project_file: {root_project_file},
    }}"""
    )

    # 3. Find projects
    projects = list_projects(root_project_file)

    # 4. Log the result
    logging.info("[Server] Found %d project(s)", len(projects))
    for project in projects:
        logging.info(
            "  %s root at %s using %s",
            project.project_name,
            project.project_root,
            project.project_tool,
        )

    # 5. Write response to stream
    response: List[str] = [project.project_root for project in projects]

    # Encode the response using CBOR
    encoded_response = b""
    encoded_response += dumps(RemotingMessageType.Response)
    encoded_response += dumps(OK)
    encoded_response += dumps(response)
    encoded_response += COMMAND_END
    sock.sendall(encoded_response)

    logging.info("[Server] Request completed.")
|
@@ -0,0 +1,196 @@
|
|
1
|
+
import os
|
2
|
+
import glob
|
3
|
+
import toml
|
4
|
+
from typing import Any
|
5
|
+
|
6
|
+
from dataclasses import dataclass
|
7
|
+
|
8
|
+
|
9
|
+
@dataclass
class Project:
    """A sub-project discovered from a monorepo's pyproject.toml."""

    # Dependency/package name as it appears in the project metadata.
    project_name: str
    # Path to the sub-project's root directory (absolute for path-based
    # sources; workspace globs yield the matched directory as-is).
    project_root: str
    # Tool convention that declared the sub-project, e.g. "poetry",
    # "hatch:<env>", "uv.sources", "uv.workspace", "project.dependencies".
    project_tool: str
|
14
|
+
|
15
|
+
|
16
|
+
def list_projects(pyproject_path: str) -> list[Project]:
    """
    Parse *pyproject_path* to identify sub projects in a monorepo and
    return them as a list of Project records.

    Raises FileNotFoundError when the file does not exist.
    """
    if not os.path.isfile(pyproject_path):
        raise FileNotFoundError(f"{pyproject_path} does not exist.")

    # Load and parse the pyproject.toml file
    with open(pyproject_path, "r") as fh:
        parsed = toml.load(fh)

    # Dispatch on the first matching tool convention; the generic
    # [project.dependencies] scan is the fallback.
    if is_poetry_project(parsed):
        return find_sub_projects_in_poetry(parsed, pyproject_path)
    if is_hatch_project(parsed):
        return find_sub_projects_in_hatch(parsed, pyproject_path)
    if is_uv_project(parsed):
        return find_sub_projects_in_uv_sources(parsed, pyproject_path)
    return find_sub_projects_in_project_dependencies(parsed, pyproject_path)
|
39
|
+
|
40
|
+
|
41
|
+
def is_poetry_project(tomlData: dict[str, Any]) -> bool:
    """True when the parsed TOML declares [tool.poetry.dependencies]."""
    if "tool" not in tomlData:
        return False
    if "poetry" not in tomlData["tool"]:
        return False
    return "dependencies" in tomlData["tool"]["poetry"]


def is_hatch_project(tomlData: dict[str, Any]) -> bool:
    """True when the parsed TOML contains a [tool.hatch] table."""
    if "tool" not in tomlData:
        return False
    return "hatch" in tomlData["tool"]


def is_uv_project(tomlData: dict[str, Any]) -> bool:
    """True when the parsed TOML contains a [tool.uv] table."""
    if "tool" not in tomlData:
        return False
    return "uv" in tomlData["tool"]
|
55
|
+
|
56
|
+
|
57
|
+
def find_sub_projects_in_poetry(
    tomlData: dict[str, Any], toml_path: str
) -> list[Project]:
    """
    Find sub projects in a poetry project: every entry under
    [tool.poetry.dependencies] whose value is a table with a "path" key,
    e.g. ``service-a = { path = "./services/service-a" }``.
    """
    dependencies = tomlData["tool"]["poetry"]["dependencies"]
    return [
        Project(
            project_name=name,
            project_root=get_absolute_path(toml_path, spec["path"]),
            project_tool="poetry",
        )
        for name, spec in dependencies.items()
        if isinstance(spec, dict) and "path" in spec
    ]
|
79
|
+
|
80
|
+
|
81
|
+
def find_sub_projects_in_hatch(
    tomlData: dict[str, Any], toml_path: str
) -> list[Project]:
    """
    Find sub projects in a hatch project: dependencies declared with a
    "path" table under any [tool.hatch.envs.<env>.dependencies], e.g.
    ``service-a = { path = "./services/service-a" }``.
    """
    found: list[Project] = []
    envs = tomlData["tool"]["hatch"].get("envs", {})
    for env_name, env_data in envs.items():
        # Skip envs that are not tables or have no dependency table.
        if not isinstance(env_data, dict):
            continue
        dependencies = env_data.get("dependencies")
        if not isinstance(dependencies, dict):
            continue
        for dep_name, spec in dependencies.items():
            if isinstance(spec, dict) and "path" in spec:
                found.append(
                    Project(
                        project_name=dep_name,
                        project_root=get_absolute_path(toml_path, spec["path"]),
                        project_tool=f"hatch:{env_name}",
                    )
                )
    return found
|
108
|
+
|
109
|
+
|
110
|
+
def find_sub_projects_in_uv_sources(
    tomlData: dict[str, Any], toml_path: str
) -> list[Project]:
    """
    Finds sub projects in a uv project by looking at sources and workspace:
    [tool.uv.sources]
    service-a = { path = "./services/service-a" }
    service-b = { path = "./services/service-b" }

    [tool.uv.workspace]
    members = ["packages/*"]
    exclude = ["packages/excluded/*"]
    """
    subProjects: list[Project] = []
    uv_sources = tomlData["tool"]["uv"].get("sources", {})
    uv_workspace = tomlData["tool"]["uv"].get("workspace", {})
    # Explicit path sources: each { path = ... } entry becomes a sub-project.
    for source_name, source_data in uv_sources.items():
        if isinstance(source_data, dict) and "path" in source_data:
            subProjects.append(
                Project(
                    project_name=source_name,
                    project_root=get_absolute_path(toml_path, source_data["path"]),
                    project_tool="uv.sources",
                )
            )
    if isinstance(uv_workspace, dict) and "members" in uv_workspace:
        # Expand the exclude globs up-front so member matches can be
        # filtered by exact directory path below.
        excluded_directories = []
        exclude_globs = uv_workspace.get("exclude", [])
        for exclude_glob in exclude_globs:
            excluded_directories.extend(
                glob.glob(
                    os.path.join(os.path.dirname(toml_path), exclude_glob),
                    recursive=True,
                )
            )

        for glob_pattern in uv_workspace["members"]:
            # Every directory included by the members globs (and not excluded by the exclude globs) must contain a pyproject.toml file
            directories = glob.glob(
                os.path.join(os.path.dirname(toml_path), glob_pattern), recursive=True
            )

            for directory in directories:
                if (
                    os.path.exists(os.path.join(directory, "pyproject.toml"))
                    and directory not in excluded_directories
                ):
                    subProjects.append(
                        Project(
                            project_name=os.path.basename(directory),
                            project_root=directory,
                            project_tool="uv.workspace",
                        )
                    )

    return subProjects
|
166
|
+
|
167
|
+
|
168
|
+
def find_sub_projects_in_project_dependencies(
    tomlData: dict[str, Any], toml_path: str
) -> list[Project]:
    """
    Finds sub projects in a project dependencies by looking for dependencies with
    the @ file:///${PROJECT_ROOT}//${SUBPROJECT_PATH} format:
    [project]
    dependencies = [
        "service-a @ file:///${PROJECT_ROOT}//services/service-a",
        "service-b @ file:///${PROJECT_ROOT}//services/service-b"
    ]
    """
    marker = "@ file:///${PROJECT_ROOT}//"
    subProjects: list[Project] = []
    for dep in tomlData["project"]["dependencies"]:
        # Fixed: the original used `dep.find(marker)` as the condition.
        # str.find returns -1 (truthy) when the marker is ABSENT — so plain
        # deps like "requests" passed the check and crashed on split()[1] —
        # and 0 (falsy) when the marker is at index 0, skipping valid entries.
        if isinstance(dep, str) and marker in dep:
            rel_path = dep.split(marker)[1]
            subProjects.append(
                Project(
                    # basename of the full dep string yields the path tail
                    # after the last "/" — i.e. the sub-project directory name.
                    project_name=os.path.basename(dep),
                    project_root=get_absolute_path(toml_path, rel_path),
                    project_tool="project.dependencies",
                )
            )
    return subProjects
|
192
|
+
|
193
|
+
|
194
|
+
def get_absolute_path(path_to_root_toml: str, path_to_sub_project: str) -> str:
    """Resolve *path_to_sub_project* against the toml file's directory."""
    root_dir = os.path.dirname(path_to_root_toml)
    combined = os.path.join(root_dir, path_to_sub_project)
    return os.path.abspath(combined)
|
@@ -0,0 +1,300 @@
|
|
1
|
+
import os
|
2
|
+
import sys
|
3
|
+
import subprocess
|
4
|
+
import importlib
|
5
|
+
import inspect
|
6
|
+
import pkgutil
|
7
|
+
from dataclasses import dataclass
|
8
|
+
|
9
|
+
from pypi_simple import PyPISimple
|
10
|
+
from packaging.version import parse as parse_version
|
11
|
+
from packaging.specifiers import SpecifierSet
|
12
|
+
from rewrite import Recipe
|
13
|
+
|
14
|
+
from typing import Optional, Dict, List, Any
|
15
|
+
|
16
|
+
DEFAULT_PYPI_URL = "https://pypi.org/simple"
|
17
|
+
DEFAULT_RECIPE_INSTALL_LOCATION = os.path.join(".", ".local_python_recipes")
|
18
|
+
|
19
|
+
# Ensure that the directory is on sys.path
|
20
|
+
if DEFAULT_RECIPE_INSTALL_LOCATION not in sys.path:
|
21
|
+
sys.path.insert(0, DEFAULT_RECIPE_INSTALL_LOCATION)
|
22
|
+
|
23
|
+
|
24
|
+
class Source:
    """A package registry (simple index) endpoint plus optional credentials."""

    def __init__(
        self,
        source: str,
        username: Optional[str] = None,
        password: Optional[str] = None,
        token: Optional[str] = None,
    ):
        # Index URL, e.g. "https://pypi.org/simple".
        self.source = source
        # Basic-auth credentials; used together when both are set
        # (see PyPiManager._get_authenticated_url in this file).
        self.username = username
        self.password = password
        # Token alternative to username/password auth.
        self.token = token
|
36
|
+
|
37
|
+
|
38
|
+
@dataclass
class Option:
    """A single configurable option declared by a recipe."""

    # Option name.
    name: str
    # Declared type of the option, in string form.
    type: str
    # Whether the option must be supplied — presumably enforced by the
    # recipe runner; verify against callers.
    required: bool
|
43
|
+
|
44
|
+
|
45
|
+
@dataclass
class InstalledRecipe:
    """A recipe discovered inside an installed package."""

    # Recipe class name as found by introspection.
    name: str
    # Fully-qualified module the recipe class was found in.
    source: str
    # Options the recipe accepts (currently populated as [] by
    # PyPiManager._introspect_module — see its TODO).
    options: List[Option]
|
50
|
+
|
51
|
+
|
52
|
+
class InstalledPackage:
    """Result record describing a completed package installation."""

    def __init__(
        self, name: str, version: str, source: str, recipes: List[InstalledRecipe]
    ):
        # Distribution name as requested.
        self.name = name
        # Version string resolved after installation (from package metadata).
        self.version = version
        # Index URL the package was installed from.
        self.source = source
        # Recipes discovered inside the installed package.
        self.recipes = recipes
|
60
|
+
|
61
|
+
|
62
|
+
class PyPiManager:
    """Static helpers for finding, installing, uninstalling, and
    introspecting recipe packages hosted on PyPI-compatible registries."""

    @staticmethod
    def find_valid_source(
        package_name: str,
        requestedVersion: str,
        package_sources: List[Source],
        include_default_source: bool = True,
    ) -> Optional[Source]:
        """
        Return the first source whose simple index hosts *package_name*
        (at *requestedVersion* when given), or None when no source matches.
        """
        package_identifier = (
            f"{package_name}=={requestedVersion}" if requestedVersion else package_name
        )
        # Fixed: probe a copy — the original appended the default registry
        # to the caller's list, mutating the argument as a side effect.
        candidates = list(package_sources)
        if include_default_source and not any(
            source.source == DEFAULT_PYPI_URL for source in candidates
        ):
            candidates.append(Source(source=DEFAULT_PYPI_URL))

        for source in candidates:
            try:
                authenticated_url = PyPiManager._get_authenticated_url(source)
                result = PyPiManager._package_exists_in_registry(
                    authenticated_url,
                    package_name,
                    requestedVersion,
                )
                if result:
                    print(
                        f"Package {package_identifier} found in source: {source.source}"
                    )
                    return source
                else:
                    print(
                        f"Package {package_identifier} not found in source: {source.source}"
                    )
            except Exception as e:
                print(f"Error checking source {source.source}: {e}")

        return None

    @staticmethod
    def install_package(
        package_name: str,
        requestedVersion: Optional[str] = None,
        package_source: Optional[Source] = None,
    ) -> InstalledPackage:
        """
        Install *package_name* (optionally pinned to *requestedVersion*)
        into DEFAULT_RECIPE_INSTALL_LOCATION via pip and return an
        InstalledPackage with the resolved version and discovered recipes.

        Raises RuntimeError when pip or post-install introspection fails.
        """
        package_identifier = (
            f"{package_name}=={requestedVersion}" if requestedVersion else package_name
        )

        pip_command = [
            sys.executable,
            "-m",
            "pip",
            "install",
            package_identifier,
            "--target",
            DEFAULT_RECIPE_INSTALL_LOCATION,
        ]

        if package_source:
            authenticated_url = PyPiManager._get_authenticated_url(package_source)
            pip_command.extend(["--index-url", authenticated_url])
        else:
            pip_command.extend(["--index-url", DEFAULT_PYPI_URL])

        # create directory if it does not exist
        if not os.path.exists(DEFAULT_RECIPE_INSTALL_LOCATION):
            os.makedirs(DEFAULT_RECIPE_INSTALL_LOCATION)

        try:
            subprocess.run(pip_command, check=True)
            metadata = PyPiManager._get_package_metadata(package_name)
            resolvedVersion = metadata.get("version", "")

            discovered_recipes = PyPiManager._introspect_module(package_name)

            return InstalledPackage(
                name=package_name,
                version=resolvedVersion,
                source=package_source.source if package_source else DEFAULT_PYPI_URL,
                recipes=discovered_recipes,
            )
        except Exception as e:
            raise RuntimeError(f"Failed to install package {package_name}: {e}") from e

    @staticmethod
    def uninstall_package(package_name: str) -> None:
        """
        Uninstalls the specified package using pip.
        """
        try:
            # Fixed: invoke pip via the current interpreter, matching
            # install_package — a bare "pip" on PATH may belong to a
            # different Python environment.
            subprocess.run(
                [sys.executable, "-m", "pip", "uninstall", "-y", package_name],
                check=True,
            )
            print(f"Package {package_name} uninstalled successfully.")
        except Exception as e:
            raise RuntimeError(
                f"Failed to uninstall package {package_name}: {e}"
            ) from e

    @staticmethod
    def load_recipe(
        recipe_name: str, module_name: str, recipe_options: List[Option]
    ) -> Recipe:
        """
        Import *module_name* and instantiate its attribute *recipe_name*,
        passing *recipe_options* only when the constructor takes parameters.
        """
        try:
            module = importlib.import_module(module_name)
            recipe = getattr(module, recipe_name)

            has_params = bool(inspect.signature(recipe).parameters)

            if not has_params:
                return recipe()
            else:
                return recipe(recipe_options)
        except Exception as e:
            raise RuntimeError(f"Failed to load recipe {recipe_name}: {e}") from e

    @staticmethod
    def load_package_details(package_name: str) -> Dict[str, Any]:
        """
        Return metadata for an installed package; raise RuntimeError when
        the package is not installed or metadata extraction fails.
        """
        try:
            metadata = PyPiManager._get_package_metadata(package_name)
            if not metadata:
                raise RuntimeError(f"Package {package_name} is not installed.")
            return metadata
        except Exception as e:
            raise RuntimeError(
                f"Failed to load package details for {package_name}: {e}"
            ) from e

    @staticmethod
    def _get_package_metadata(package_name: str) -> Dict[str, Any]:
        """
        Extracts metadata for an installed package via importlib.metadata.
        """
        # Fixed: the file only does "import importlib", which does not make
        # the "metadata" submodule available; import it explicitly so
        # importlib.metadata.* resolves reliably.
        import importlib.metadata

        # Ensure the custom install location is on sys.path
        if DEFAULT_RECIPE_INSTALL_LOCATION not in sys.path:
            sys.path.insert(0, DEFAULT_RECIPE_INSTALL_LOCATION)

        try:
            dist = importlib.metadata.distribution(package_name)
            # Convert it into a dictionary of metadata fields
            metadata_dict = {key.lower(): value for key, value in dist.metadata.items()}
            return metadata_dict
        except importlib.metadata.PackageNotFoundError:
            print(
                f"Package {package_name} not found in {DEFAULT_RECIPE_INSTALL_LOCATION}"
            )
            return {}

    @staticmethod
    def _introspect_module(module_name: str) -> List[InstalledRecipe]:
        """Import *module_name* and return every Recipe subclass found in
        its immediate submodules."""
        # Ensure the custom install location is on sys.path
        if DEFAULT_RECIPE_INSTALL_LOCATION not in sys.path:
            sys.path.insert(0, DEFAULT_RECIPE_INSTALL_LOCATION)

        # Convert to snake case per proper import conventions
        module_name = module_name.replace("-", "_")

        try:
            module = importlib.import_module(module_name)
            submodules = [name for _, name, _ in pkgutil.iter_modules(module.__path__)]

            discovered_recipes: List[InstalledRecipe] = []

            for submodule in submodules:
                full_submodule_name = f"{module_name}.{submodule}"
                # Import each submodule
                sm = importlib.import_module(full_submodule_name)

                # Get all classes in the submodule
                classes = inspect.getmembers(sm, inspect.isclass)

                for class_name, class_obj in classes:
                    # Check if class is a subclass of Recipe
                    if issubclass(class_obj, Recipe) and class_obj is not Recipe:
                        discovered_recipes.append(
                            InstalledRecipe(
                                name=class_name,
                                source=full_submodule_name,
                                options=[],  # TODO support options
                            )
                        )

            return discovered_recipes
        except Exception as e:
            raise RuntimeError(
                f"Failed to introspect module {module_name}: {e}"
            ) from e

    @staticmethod
    def _package_exists_in_registry(
        index_url: str, package_name: str, version: Optional[str] = None
    ) -> bool:
        """True when the simple index at *index_url* lists *package_name*
        (and a distribution matching *version*, when given)."""
        if not index_url.endswith("/"):
            index_url += "/"
        client = PyPISimple(endpoint=index_url)

        try:
            project_page = client.get_project_page(package_name)
            if not project_page:
                return False

            # When no version the presence of the package is enough to confirm a valid source
            if version is None:
                return True

            specifier = SpecifierSet(f"=={version}")

            # Hunt for a distribution that matches the requested version
            for dist in project_page.packages:
                if dist.version is not None:
                    dist_version = parse_version(dist.version)
                    if dist_version in specifier:
                        return True

            # No distributions matched the requested version specifier
            return False

        except Exception as e:  # pylint: disable=broad-except
            print(f"Error checking package {package_name}: {e}")
            return False

    @staticmethod
    def _get_authenticated_url(source: Source) -> str:
        """
        Returns the source URL with embedded authentication if username/password or token is provided.
        """
        if source.username and source.password:
            return source.source.replace(
                "https://", f"https://{source.username}:{source.password}@"
            )
        elif source.token:
            return source.source.replace("https://", f"https://{source.token}@")
        return source.source
|