openrewrite-remote 0.13.4__py3-none-any.whl → 0.14.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {openrewrite_remote-0.13.4.dist-info → openrewrite_remote-0.14.0.dist-info}/METADATA +1 -1
- openrewrite_remote-0.14.0.dist-info/RECORD +24 -0
- {openrewrite_remote-0.13.4.dist-info → openrewrite_remote-0.14.0.dist-info}/WHEEL +1 -1
- rewrite_remote/__init__.py +1 -3
- rewrite_remote/handlers/handler_helpers.py +1 -1
- rewrite_remote/handlers/hello_world_handler.py +1 -2
- rewrite_remote/handlers/parse_project_sources_handler.py +95 -0
- rewrite_remote/handlers/project_helper.py +58 -14
- rewrite_remote/handlers/pypi_manager.py +13 -16
- rewrite_remote/handlers/recipe_install_handler.py +1 -3
- rewrite_remote/handlers/run_recipe_load_and_visitor_handler.py +3 -7
- rewrite_remote/receiver.py +54 -38
- rewrite_remote/remote_utils.py +21 -15
- rewrite_remote/remoting.py +17 -12
- rewrite_remote/sender.py +75 -118
- rewrite_remote/server.py +28 -22
- rewrite_remote/type_utils.py +6 -2
- openrewrite_remote-0.13.4.dist-info/RECORD +0 -23
- {openrewrite_remote-0.13.4.dist-info → openrewrite_remote-0.14.0.dist-info}/entry_points.txt +0 -0
- {openrewrite_remote-0.13.4.dist-info → openrewrite_remote-0.14.0.dist-info}/top_level.txt +0 -0
openrewrite_remote-0.14.0.dist-info/RECORD
ADDED
@@ -0,0 +1,24 @@
+rewrite_remote/__init__.py,sha256=gl4Dpp7mYIrfNgeyh2IeW2fsj9ep9pveAK_eeoDLu7c,278
+rewrite_remote/client.py,sha256=95ZCAtVOngF0ZqqKnOsrweUeGKruf3UKGPXNGTrNyy0,1853
+rewrite_remote/event.py,sha256=texLJD1mcFkpBpiXAa-Rmip0Tgqm2OlBpRPHFZyWcBs,359
+rewrite_remote/receiver.py,sha256=b2jMNwTfnWM0oDNjJQSWV1z_ahJ0kDZEXYdwNc4riIw,20743
+rewrite_remote/remote_utils.py,sha256=gU9hN-aHxy9NF4uRqjd4OhvgPrNfDyCt3y1PtrWMCgA,10677
+rewrite_remote/remoting.py,sha256=yS2sYMPPBq0gt1xalmCTLbH4VXbBqNl-EXj8ZfgA5UM,13525
+rewrite_remote/sender.py,sha256=PXX0hrmeHKEge_7qWEMqNn4KYleDrFUQqhBDWTq1pus,18993
+rewrite_remote/server.py,sha256=TQ1cK6eAiJgY0ZAGl4huu5urI2Cm9DJpl-k8Is-IM1M,9466
+rewrite_remote/type_utils.py,sha256=qzQ2X97U8yRskjOo3F5qcFVebW2bPYFU32EpCKL-KNU,4400
+rewrite_remote/handlers/__init__.py,sha256=ED6jHcYiuYpr_0vjGz0zx2lrrmJT9sDJCzIljoDfmlM,65
+rewrite_remote/handlers/handler_helpers.py,sha256=xvAKAVGH56Hrjp9sbilp4v1a2_ra0L63YjXurF5K5o0,512
+rewrite_remote/handlers/hello_world_handler.py,sha256=HLVukS16-PRRidlnfABT6ETXURhQYIJnTSb09PJnDek,1284
+rewrite_remote/handlers/list_projects_handler.py,sha256=ukkCST7whpCxQ0omCWWw9W27Su3rFSnnEIhAS6T4iOU,2007
+rewrite_remote/handlers/parse_project_sources_handler.py,sha256=mn7LVW7yu3SFFDdZlTU4i65wFhQxhkA7ttSqSs3s7Vk,2997
+rewrite_remote/handlers/project_helper.py,sha256=wEohTWukfcDCNlSran1cuedg9CGeVdbhiHRD2wwpAwk,8445
+rewrite_remote/handlers/pypi_manager.py,sha256=XulTdZUm5aTme7Qn-jaUvlgO6N9bKS03UUQtbxv9kYE,10247
+rewrite_remote/handlers/recipe_install_handler.py,sha256=SsY7oXEJqE3mKUM_msnV0i-aUXmeUWpYjq_121ui3Z8,4920
+rewrite_remote/handlers/run_recipe_load_and_visitor_handler.py,sha256=BmsKjV38LenewWvun8cknGk_oR3GoinmmsXtWKUWUak,4085
+rewrite_remote/handlers/types.py,sha256=5TK_oFp-7iy1iACWisuQJsa_WHTsqcPrV9nQTZ-_xgo,518
+openrewrite_remote-0.14.0.dist-info/METADATA,sha256=ydjRf-SRkjLYHaWICk1zDeKw8gKDhL11Y_hm_kB29AM,386
+openrewrite_remote-0.14.0.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
+openrewrite_remote-0.14.0.dist-info/entry_points.txt,sha256=SMukuF7TPjQr3IZIcH8f98-_QBCqYSbYXYrVv-5UzRI,69
+openrewrite_remote-0.14.0.dist-info/top_level.txt,sha256=ansTioSZ-62aH3F2L3d1Bua0pJF4GOtgQ1PpG-CzcP0,15
+openrewrite_remote-0.14.0.dist-info/RECORD,,
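For reference, each RECORD row is `path,hash,size`, where the hash is the SHA-256 digest of the file encoded as unpadded urlsafe base64 (the standard wheel RECORD format). A small sketch of how any of the values above can be reproduced locally:

```python
import base64
import hashlib


def record_hash(data: bytes) -> str:
    """Compute a wheel RECORD hash entry: sha256=<urlsafe-b64 digest, no padding>."""
    digest = hashlib.sha256(data).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


# e.g. record_hash(open("rewrite_remote/__init__.py", "rb").read()) should match
# the "sha256=gl4Dpp7..." value listed in the RECORD entry above.
```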
rewrite_remote/__init__.py
CHANGED
@@ -6,7 +6,5 @@ from .sender import *
 from .remoting import *
 
 __all__ = [
-    name
-    for name in dir()
-    if not name.startswith("_") and not isinstance(globals()[name], TypeVar)
+    name for name in dir() if not name.startswith("_") and not isinstance(globals()[name], TypeVar)
 ]
rewrite_remote/handlers/handler_helpers.py
CHANGED
@@ -3,7 +3,7 @@ import socket
 import cbor2
 
 from rewrite_remote.remote_utils import COMMAND_END
-from rewrite_remote import RemotingMessageType,
+from rewrite_remote import RemotingMessageType, ERROR
 
 
 def respond_with_error(message: str, sock: socket.socket) -> None:
rewrite_remote/handlers/hello_world_handler.py
CHANGED
@@ -18,9 +18,8 @@ def hello_world_handler(
     request = cbor2.load(stream)
 
     if request != "hello":
-        raise ValueError(f"Unexpected request: {request}")
-    else:
         print("Did not receive 'hello' ")
+        raise ValueError(f"Unexpected request: {request}")
 
     # Prepare a response
     response_stream = BytesIO()
rewrite_remote/handlers/parse_project_sources_handler.py
ADDED
@@ -0,0 +1,95 @@
+import logging
+import os
+import socket
+from io import BytesIO
+from typing import TypedDict
+
+import cbor2
+
+from cbor2 import dumps, CBORDecoder
+
+from rewrite_remote.handlers.project_helper import (
+    find_python_files,
+    parse_python_sources,
+)
+
+from rewrite_remote.remoting import (
+    OK,
+    RemotingMessageType,
+)
+
+from rewrite_remote.remote_utils import COMMAND_END
+from rewrite_remote.remoting import RemotingContext
+from rewrite_remote.handlers.handler_helpers import respond_with_error
+from rewrite_remote.remoting import RemotingMessenger
+
+
+class ParseProjectSourcesArgs(TypedDict):
+    project_file_path: str  # The path to the individual pyproject.toml
+    root_project_file_path: str  # The path to the root pyproject.toml
+    repository_dir: str  # The path to the root repository directory
+
+
+def decode_parse_project_sources_args(
+    decoder: CBORDecoder,
+) -> ParseProjectSourcesArgs:
+    """
+    Decodes the arguments (order matters and must match the order encoded)
+    """
+    project_file_path = str(decoder.decode())
+    root_project_file_path = str(decoder.decode())
+    repository_dir = str(decoder.decode())
+
+    return {
+        "project_file_path": project_file_path,
+        "root_project_file_path": root_project_file_path,
+        "repository_dir": repository_dir,
+    }
+
+
+def parse_project_sources_handler(
+    stream: BytesIO, sock: socket.socket, remoting_ctx: RemotingContext
+) -> None:
+    remoting_ctx.reset()
+
+    # Read input from stream
+    try:
+        data = stream.read()
+        decoder = CBORDecoder(BytesIO(data))
+        args = decode_parse_project_sources_args(decoder)
+        project_file_path = args.get("project_file_path")
+        root_project_file_path = args.get("root_project_file_path")
+        repository_dir = args.get("repository_dir")
+    except Exception as e:  # pylint: disable=broad-except
+        respond_with_error(f"Failed to decode arguments: {e}", sock)
+        return
+
+    if project_file_path is None:
+        respond_with_error("recipe_name is required", sock)
+        return
+
+    # Log the request
+    logging.info(
+        "[Server] Handling parse-project-sources request: {"
+        "project_file_path: %s, root_project_file_path: %s, repository_dir: %s}",
+        project_file_path,
+        root_project_file_path,
+        repository_dir,
+    )
+
+    # Find all python files in the project
+    base_dir = os.path.dirname(project_file_path)
+    python_files = find_python_files(base_dir)
+    source_files = parse_python_sources(python_files)
+
+    # Write the response
+    response_stream = BytesIO()
+    cbor2.dump(RemotingMessageType.Response, response_stream)
+    cbor2.dump(OK, response_stream)
+    for source_file in source_files:
+        logging.info("Sending ${source_file.source_path}")
+        RemotingMessenger.send_tree(remoting_ctx, response_stream, source_file, None)
+    cbor2.dump(COMMAND_END, response_stream)
+    sock.sendall(response_stream.getvalue())
+
+    logging.info("[Server] Request completed.")
rewrite_remote/handlers/project_helper.py
CHANGED
@@ -1,7 +1,14 @@
 import os
 import glob
 import toml
-from typing import Any
+from typing import Any, List, Iterable
+from pathlib import Path
+from io import StringIO
+
+from rewrite.tree import SourceFile
+from rewrite.python.parser import PythonParserBuilder
+
+from rewrite import ParserInput, InMemoryExecutionContext
 
 from dataclasses import dataclass
 
@@ -54,9 +61,7 @@ def is_uv_project(tomlData: dict[str, Any]) -> bool:
     return "tool" in tomlData and "uv" in tomlData["tool"]
 
 
-def find_sub_projects_in_poetry(
-    tomlData: dict[str, Any], toml_path: str
-) -> list[Project]:
+def find_sub_projects_in_poetry(tomlData: dict[str, Any], toml_path: str) -> list[Project]:
     """
     Finds sub projects in a poetry project by looking for dependencies with a "path" key:
     [tool.poetry.dependencies]
@@ -78,9 +83,7 @@ def find_sub_projects_in_hatch(
     return subProjects
 
 
-def find_sub_projects_in_hatch(
-    tomlData: dict[str, Any], toml_path: str
-) -> list[Project]:
+def find_sub_projects_in_hatch(tomlData: dict[str, Any], toml_path: str) -> list[Project]:
     """
     Finds sub projects in a hatch project by looking for dependencies with a "path" key:
     [tool.hatch.envs.default.dependencies]
@@ -98,18 +101,14 @@
         subProjects.append(
             Project(
                 project_name=dep_name,
-                project_root=get_absolute_path(
-                    toml_path, dep_value["path"]
-                ),
+                project_root=get_absolute_path(toml_path, dep_value["path"]),
                 project_tool=f"hatch:{env_name}",
             )
         )
     return subProjects
 
 
-def find_sub_projects_in_uv_sources(
-    tomlData: dict[str, Any], toml_path: str
-) -> list[Project]:
+def find_sub_projects_in_uv_sources(tomlData: dict[str, Any], toml_path: str) -> list[Project]:
     """
     Finds sub projects in a uv project by looking at sources and workspace:
     [tool.uv.sources]
@@ -146,7 +145,8 @@ def find_sub_projects_in_uv_sources(
         for glob_pattern in uv_workspace["members"]:
             # Every directory included by the members globs (and not excluded by the exclude globs) must contain a pyproject.toml file
             directories = glob.glob(
-                os.path.join(os.path.dirname(toml_path), glob_pattern),
+                os.path.join(os.path.dirname(toml_path), glob_pattern),
+                recursive=True,
             )
 
             for directory in directories:
@@ -194,3 +194,47 @@ def find_sub_projects_in_project_dependencies(
 def get_absolute_path(path_to_root_toml: str, path_to_sub_project: str) -> str:
     path_to_root = os.path.dirname(path_to_root_toml)
     return os.path.abspath(os.path.join(path_to_root, path_to_sub_project))
+
+
+def find_python_files(base_dir: str) -> List[str]:
+    """
+    Find all python files in the given directory and its subdirectories
+    """
+    python_files = []
+    for root, dirs, files in os.walk(base_dir):
+        for file in files:
+            if file.endswith(".py"):
+                python_files.append(os.path.join(root, file))
+    return python_files
+
+
+def read_file_contents(path: str) -> StringIO:
+    """
+    Read the contents of the file at the given path
+    """
+    with open(path, "r", newline="", encoding="utf-8") as file:
+        return StringIO(file.read())
+
+
+def parse_python_sources(paths: List[str]) -> List[SourceFile]:
+    """
+    Parse the given python files and return a list of SourceFile objects
+    """
+    parser = PythonParserBuilder().build()
+    ctx = InMemoryExecutionContext()
+
+    iterable_source_files: Iterable[SourceFile] = parser.parse_inputs(
+        [
+            ParserInput(
+                Path(path),
+                None,
+                True,
+                lambda: read_file_contents(path),
+            )
+            for path in paths
+        ],
+        None,
+        ctx,
+    )
+
+    return list(iterable_source_files)
rewrite_remote/handlers/pypi_manager.py
CHANGED
@@ -4,6 +4,8 @@ import subprocess
 import importlib
 import inspect
 import pkgutil
+import site
+
 from dataclasses import dataclass
 
 from pypi_simple import PyPISimple
@@ -51,7 +53,11 @@ class InstalledRecipe:
 
 class InstalledPackage:
     def __init__(
-        self,
+        self,
+        name: str,
+        version: str,
+        source: str,
+        recipes: List[InstalledRecipe],
     ):
         self.name = name
         self.version = version
@@ -87,14 +93,10 @@ class PyPiManager:
                     requestedVersion,
                 )
                 if result:
-                    print(
-                        f"Package {package_identifier} found in source: {source.source}"
-                    )
+                    print(f"Package {package_identifier} found in source: {source.source}")
                     return source
                 else:
-                    print(
-                        f"Package {package_identifier} not found in source: {source.source}"
-                    )
+                    print(f"Package {package_identifier} not found in source: {source.source}")
             except Exception as e:
                 print(f"Error checking source {source.source}: {e}")
 
@@ -161,9 +163,7 @@ class PyPiManager:
            raise RuntimeError(f"Failed to uninstall package {package_name}: {e}")
 
     @staticmethod
-    def load_recipe(
-        recipe_name: str, module_name: str, recipe_options: List[Option]
-    ) -> Recipe:
+    def load_recipe(recipe_name: str, module_name: str, recipe_options: List[Option]) -> Recipe:
         """
         Loads a recipe from the specified source.
         """
@@ -191,9 +191,7 @@ class PyPiManager:
                 raise RuntimeError(f"Package {package_name} is not installed.")
             return metadata
         except Exception as e:
-            raise RuntimeError(
-                f"Failed to load package details for {package_name}: {e}"
-            )
+            raise RuntimeError(f"Failed to load package details for {package_name}: {e}")
 
     @staticmethod
     def _get_package_metadata(package_name: str) -> Dict[str, Any]:
@@ -210,9 +208,7 @@ class PyPiManager:
            metadata_dict = {key.lower(): value for key, value in dist.metadata.items()}
            return metadata_dict
        except importlib.metadata.PackageNotFoundError:
-            print(
-                f"Package {package_name} not found in {DEFAULT_RECIPE_INSTALL_LOCATION}"
-            )
+            print(f"Package {package_name} not found in {DEFAULT_RECIPE_INSTALL_LOCATION}")
            return {}
 
     @staticmethod
@@ -225,6 +221,7 @@ class PyPiManager:
         module_name = module_name.replace("-", "_")
 
         try:
+            site.main()  # We want the recently installed module to be available to introspection
             module = importlib.import_module(module_name)
             submodules = [name for _, name, _ in pkgutil.iter_modules(module.__path__)]
 
|
|
111
111
|
|
112
112
|
# 4. Install the recipe
|
113
113
|
try:
|
114
|
-
installable_recipes = PyPiManager.install_package(
|
115
|
-
package_id, package_version, valid_source
|
116
|
-
)
|
114
|
+
installable_recipes = PyPiManager.install_package(package_id, package_version, valid_source)
|
117
115
|
except Exception as e: # pylint: disable=broad-except
|
118
116
|
respond_with_error(f"Failed to install package: {e}", sock)
|
119
117
|
return
|
@@ -13,7 +13,7 @@ from rewrite_remote.remoting import (
|
|
13
13
|
)
|
14
14
|
|
15
15
|
from rewrite_remote.remote_utils import COMMAND_END
|
16
|
-
from rewrite_remote.remoting import
|
16
|
+
from rewrite_remote.remoting import RemotingContext
|
17
17
|
from rewrite_remote.handlers.handler_helpers import respond_with_error
|
18
18
|
|
19
19
|
from rewrite import InMemoryExecutionContext
|
@@ -98,9 +98,7 @@ def run_recipe_load_and_visitor_handler(
|
|
98
98
|
RemotingExecutionContextView.view(ctx).remoting_context = remoting_ctx
|
99
99
|
|
100
100
|
try:
|
101
|
-
recipe_instance = PyPiManager.load_recipe(
|
102
|
-
recipe_name, recipe_source, recipe_options
|
103
|
-
)
|
101
|
+
recipe_instance = PyPiManager.load_recipe(recipe_name, recipe_source, recipe_options)
|
104
102
|
except Exception as e:
|
105
103
|
respond_with_error(f"Failed to load recipe: {e}", sock)
|
106
104
|
return
|
@@ -122,9 +120,7 @@ def run_recipe_load_and_visitor_handler(
|
|
122
120
|
|
123
121
|
# 5. Write the response
|
124
122
|
response_encoder = BytesIO()
|
125
|
-
RemotingMessenger.send_tree(
|
126
|
-
remoting_ctx, response_encoder, RemotingMessenger._state, received
|
127
|
-
)
|
123
|
+
RemotingMessenger.send_tree(remoting_ctx, response_encoder, RemotingMessenger._state, received)
|
128
124
|
|
129
125
|
encoded_response = b""
|
130
126
|
encoded_response += dumps(RemotingMessageType.Response)
|
rewrite_remote/receiver.py
CHANGED
@@ -1,6 +1,7 @@
 # type: ignore
 # Ignoring type checking for this file because there is too many errors for now
 
+import struct
 from collections import OrderedDict
 from enum import Enum
 from pathlib import Path
@@ -22,8 +23,9 @@ from typing import (
 from uuid import UUID
 
 from _cbor2 import break_marker
-from cbor2 import CBORDecoder
+from cbor2 import CBORDecoder, CBORDecodeValueError
 from cbor2._decoder import major_decoders
+
 from rewrite import (
     Markers,
     Marker,
@@ -33,7 +35,6 @@ from rewrite import (
     UnknownJavaMarker,
 )
 from rewrite import Tree, TreeVisitor, Cursor, FileAttributes
-
 from . import remote_utils, type_utils
 from .event import DiffEvent, EventType
 
@@ -86,7 +87,10 @@ class ReceiverFactory(Protocol):
 
 class DetailsReceiver(Protocol[T]):
     def receive_details(
-        self,
+        self,
+        before: Optional[T],
+        type: Optional[Type[T]],
+        ctx: "ReceiverContext",
     ) -> T:
         pass
 
@@ -111,7 +115,10 @@ class ReceiverContext:
         return cast(Optional[T], OmniReceiver().receive(before, self))
 
     def receive_tree(
-        self,
+        self,
+        before: Optional[Tree],
+        tree_type: Optional[str],
+        ctx: "ReceiverContext",
     ) -> Tree:
         if before:
             return before.accept(self.visitor, ctx)
@@ -123,9 +130,7 @@ class ReceiverContext:
     def polymorphic_receive_tree(self, before: Optional[Tree]) -> Optional[Tree]:
         diff_event = self.receiver.receive_node()
         if diff_event.event_type in (EventType.Add, EventType.Update):
-            tree_receiver = self.new_receiver(
-                diff_event.concrete_type or type(before).__name__
-            )
+            tree_receiver = self.new_receiver(diff_event.concrete_type or type(before).__name__)
             forked = tree_receiver.fork(self)
             return forked.receive_tree(
                 None if diff_event.event_type == EventType.Add else before,
@@ -159,7 +164,10 @@ class ReceiverContext:
         return before
 
     def receive_markers(
-        self,
+        self,
+        before: Optional[Markers],
+        type: Optional[str],
+        ctx: "ReceiverContext",
     ) -> Markers:
         id_ = self.receive_value(getattr(before, "id", None), UUID)
         after_markers: Optional[List[Marker]] = self.receive_values(
@@ -177,9 +185,7 @@ class ReceiverContext:
     ) -> Optional[List[A]]:
         return remote_utils.receive_nodes(before, details, self)
 
-    def receive_values(
-        self, before: Optional[List[V]], type: Type[Any]
-    ) -> Optional[List[V]]:
+    def receive_values(self, before: Optional[List[V]], type: Type[Any]) -> Optional[List[V]]:
         return remote_utils.receive_values(before, type, self)
 
     def receive_value(self, before: Optional[V], type: Type[Any]) -> Optional[V]:
@@ -198,7 +204,7 @@ class ReceiverContext:
         ReceiverContext.Registry[type_] = receiver_factory
 
 
-ValueDeserializer = Callable[[
+ValueDeserializer = Callable[[str, CBORDecoder, "DeserializationContext"], Optional[Any]]
 
 
 class DefaultValueDeserializer(ValueDeserializer):
@@ -256,7 +262,7 @@ class DeserializationContext:
         initial_byte = decoder.read(1)[0]
         major_type = initial_byte >> 5
         subtype = initial_byte & 31
-        concrete_type = None
+        concrete_type: Optional[str] = None
 
         # Object ID for Marker, JavaType, etc.
         if major_type == 0:
@@ -271,11 +277,15 @@ class DeserializationContext:
         if get_origin(expected_type) in (List, list):
             expected_elem_type = get_args(expected_type)[0]
             array = []
-
-
-
+            length = _decode_length(decoder, subtype, allow_indefinite=True)
+            if length:
+                for _ in range(length):
+                    elem = self.deserialize(expected_elem_type, decoder)
+                    array.append(elem)
             else:
-                while
+                while (
+                    not (value := self.deserialize(expected_elem_type, decoder)) == break_marker
+                ):
                     array.append(value)
             return array
         else:
@@ -301,7 +311,7 @@ class DeserializationContext:
             id_ = UUID(bytes=decoder.decode())
             return SearchResult(id_, desc)
 
-        if
+        if deser := self.value_deserializers.get(concrete_type):
             return deser(concrete_type, decoder, self)
 
         for type_, value_deserializer in self.value_deserializers.items():
@@ -323,6 +333,8 @@ class DeserializationContext:
             return decoder.decode_string(subtype)
         elif major_type == 2:
             return decoder.decode_bytestring(subtype)
+        elif major_type == 7:
+            return decoder.decode_special(subtype)
         else:
             return major_decoders[major_type](decoder, subtype)
 
@@ -378,9 +390,7 @@ class DeserializationContext:
         if concrete_type == "java.math.BigDecimal":
             return decoder.decode()
 
-        raise NotImplementedError(
-            f"No deserialization implemented for: {concrete_type}"
-        )
+        raise NotImplementedError(f"No deserialization implemented for: {concrete_type}")
 
         if state == cbor2.CborReaderState.ARRAY:
             decoder.read_array_start()
@@ -425,9 +435,7 @@ class DeserializationContext:
         for type_, deserializer in self.value_deserializers.items():
             if issubclass(actual_type, type_):
                 return deserializer.deserialize(actual_type, decoder, self)
-        raise NotImplementedError(
-            f"No deserialization implemented for: {expected_type}"
-        )
+        raise NotImplementedError(f"No deserialization implemented for: {expected_type}")
 
 
 class JsonReceiver(TreeReceiver):
@@ -448,11 +456,7 @@ class JsonReceiver(TreeReceiver):
         concrete_type = None
 
         if event_type in {EventType.Add, EventType.Update}:
-            if (
-                event_type == EventType.Add
-                and len(array) > 1
-                and isinstance(array[1], str)
-            ):
+            if event_type == EventType.Add and len(array) > 1 and isinstance(array[1], str):
                 concrete_type = array[1]
 
         elif event_type not in {
@@ -502,6 +506,24 @@ class JsonReceiver(TreeReceiver):
         return DiffEvent(event_type, concrete_type, msg)
 
 
+def _decode_length(
+    decoder: CBORDecoder, subtype: int, allow_indefinite: bool = False
+) -> Optional[int]:
+    if subtype < 24:
+        return subtype
+    elif subtype == 24:
+        return decoder.read(1)[0]
+    elif subtype == 25:
+        return cast(int, struct.unpack(">H", decoder.read(2))[0])
+    elif subtype == 26:
+        return cast(int, struct.unpack(">L", decoder.read(4))[0])
+    elif subtype == 27:
+        return cast(int, struct.unpack(">Q", decoder.read(8))[0])
+    elif subtype == 31 and allow_indefinite:
+        return None
+    else:
+        raise CBORDecodeValueError(f"unknown unsigned integer subtype 0x{subtype:x}")
+
 
 class ParseErrorReceiver(Receiver):
     def fork(self, ctx):
         return ctx.fork(self.Visitor(), self.Factory())
@@ -522,21 +544,15 @@
         parse_error = parse_error.with_markers(
             ctx.receive_node(parse_error.markers, ctx.receive_markers)
         )
-        parse_error = parse_error.with_source_path(
-            ctx.receive_value(parse_error.source_path)
-        )
+        parse_error = parse_error.with_source_path(ctx.receive_value(parse_error.source_path))
         parse_error = parse_error.with_file_attributes(
             ctx.receive_value(parse_error.file_attributes)
         )
-        parse_error = parse_error.with_charset_name(
-            ctx.receive_value(parse_error.charset_name)
-        )
+        parse_error = parse_error.with_charset_name(ctx.receive_value(parse_error.charset_name))
         parse_error = parse_error.with_charset_bom_marked(
             ctx.receive_value(parse_error.charset_bom_marked)
         )
-        parse_error = parse_error.with_checksum(
-            ctx.receive_value(parse_error.checksum)
-        )
+        parse_error = parse_error.with_checksum(ctx.receive_value(parse_error.checksum))
         parse_error = parse_error.with_text(ctx.receive_value(parse_error.text))
         # parse_error = parse_error.with_erroneous(ctx.receive_tree(parse_error.erroneous))
         return parse_error