agx-openplx 0.15.0__cp39-cp39-macosx_11_0_x86_64.whl
Sign up to get free protection for your applications and to get access to all the features.
- agx_openplx-0.15.0.dist-info/METADATA +231 -0
- agx_openplx-0.15.0.dist-info/RECORD +41 -0
- agx_openplx-0.15.0.dist-info/WHEEL +4 -0
- agx_openplx-0.15.0.dist-info/entry_points.txt +8 -0
- openplx/Core.py +7781 -0
- openplx/DriveTrain.py +8574 -0
- openplx/Math.py +5372 -0
- openplx/Physics.py +36195 -0
- openplx/Physics1D.py +6732 -0
- openplx/Physics3D.py +42524 -0
- openplx/Robotics.py +15762 -0
- openplx/Simulation.py +1056 -0
- openplx/Terrain.py +3891 -0
- openplx/Urdf.py +654 -0
- openplx/Vehicles.py +8793 -0
- openplx/Visuals.py +3901 -0
- openplx/_AgxOpenPlxPyApi.cpython-39-darwin.so +0 -0
- openplx/_CorePythonSwig.cpython-39-darwin.so +0 -0
- openplx/_DriveTrainSwig.cpython-39-darwin.so +0 -0
- openplx/_MathSwig.cpython-39-darwin.so +0 -0
- openplx/_Physics1DSwig.cpython-39-darwin.so +0 -0
- openplx/_Physics3DSwig.cpython-39-darwin.so +0 -0
- openplx/_PhysicsSwig.cpython-39-darwin.so +0 -0
- openplx/_RoboticsSwig.cpython-39-darwin.so +0 -0
- openplx/_SimulationSwig.cpython-39-darwin.so +0 -0
- openplx/_TerrainSwig.cpython-39-darwin.so +0 -0
- openplx/_UrdfSwig.cpython-39-darwin.so +0 -0
- openplx/_VehiclesSwig.cpython-39-darwin.so +0 -0
- openplx/_VisualsSwig.cpython-39-darwin.so +0 -0
- openplx/__init__.py +51 -0
- openplx/agxtoopenplx.py +55 -0
- openplx/anytoopenplx.py +44 -0
- openplx/api.py +1337 -0
- openplx/migrate.py +136 -0
- openplx/migration_hint.py +14 -0
- openplx/migrations.py +521 -0
- openplx/openplx_application.py +133 -0
- openplx/openplx_serialize.py +35 -0
- openplx/openplx_validate.py +57 -0
- openplx/openplx_view.py +14 -0
- openplx/versionaction.py +11 -0
openplx/migrate.py
ADDED
@@ -0,0 +1,136 @@
|
|
1
|
+
#!/usr/bin/env python3
|
2
|
+
"""
|
3
|
+
Command line utility that helps migrating OpenPLX files to a newer version
|
4
|
+
"""
|
5
|
+
from pathlib import Path
|
6
|
+
import itertools
|
7
|
+
import os
|
8
|
+
import tempfile
|
9
|
+
import json
|
10
|
+
import urllib.request
|
11
|
+
import zipfile
|
12
|
+
from io import BytesIO
|
13
|
+
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, SUPPRESS
|
14
|
+
from openplx import __version__, get_error_strings
|
15
|
+
from openplx.Core import OpenPlxContext, parseFromFile, analyze, StringVector, DocumentVector
|
16
|
+
from openplx.migrations import collect_migrations, ReplaceOp
|
17
|
+
from openplx.versionaction import VersionAction
|
18
|
+
|
19
|
+
def download_package_version(package_name, version):
    """Return a download URL for *package_name*==*version* from PyPI.

    Queries the PyPI JSON API and returns the URL of the first release
    artifact, or None when the release cannot be found or fetched.

    The caller (run_openplx_migrate) explicitly checks for a None return,
    but the original implementation could never produce one: an unknown
    package/version raised an uncaught HTTPError instead. Fetch failures
    and artifact-less releases now map to None to honor that contract.
    """
    url = f"https://pypi.org/pypi/{package_name}/{version}/json"
    try:
        with urllib.request.urlopen(url, timeout=16) as response:
            content = response.read().decode('utf-8')
    except OSError:
        # HTTPError/URLError are OSError subclasses: unknown release
        # (HTTP 404) or a network problem. Report "not found" via None.
        return None
    data = json.loads(content)
    # 'urls' lists the release artifacts; guard against it being absent
    # or empty (e.g. a release with no uploaded files).
    artifacts = data.get('urls') or []
    return artifacts[0]['url'] if artifacts else None
|
26
|
+
|
27
|
+
def unzip_package(url, extract_to):
    """Fetch a zip archive from *url* and extract it into *extract_to*."""
    with urllib.request.urlopen(url, timeout=32) as response:
        payload = response.read()
    # The whole archive is held in memory before extraction.
    with zipfile.ZipFile(BytesIO(payload)) as archive:
        archive.extractall(extract_to)
|
33
|
+
|
34
|
+
def parse_args():
    """Build the command-line parser and return (known_args, unknown_args)."""
    arg_parser = ArgumentParser(
        description="Migrates a .openplx file from an older to a newer version",
        formatter_class=ArgumentDefaultsHelpFormatter,
    )
    arg_parser.add_argument(
        "openplxfile", metavar="path",
        help="the .openplx file or directory to migrate",
    )
    # nargs=0 + SUPPRESS: --version prints and exits without occupying a slot
    # in the parsed namespace.
    arg_parser.add_argument(
        "--version", help="Show version",
        action=VersionAction, nargs=0, default=SUPPRESS,
    )
    arg_parser.add_argument(
        "--from-version", help="Version to convert from", required=True,
    )
    arg_parser.add_argument(
        "--to-version", help="Version to convert to", default=__version__,
    )
    return arg_parser.parse_known_args()
|
41
|
+
|
42
|
+
def parse_and_analyze(openplxfile, openplx_context):
    """Parse *openplxfile* into *openplx_context* and run semantic analysis.

    Returns a DocumentVector holding the parsed document, or an empty
    vector when parsing failed (errors are recorded on the context).
    """
    absolute_path = str(Path(openplxfile).absolute())
    parse_result = parseFromFile(absolute_path, openplx_context)

    documents = DocumentVector()

    document = parse_result[0]
    if document is None:
        # Parse failure: hand back an empty vector and let the caller
        # inspect the context for errors.
        return documents

    analyze(openplx_context, None)

    documents.push_back(document)
    return documents
|
54
|
+
|
55
|
+
def has_errors(openplx_context):
    """Print any errors recorded on *openplx_context*; True if any exist."""
    if not openplx_context.hasErrors():
        return False
    for message in get_error_strings(openplx_context.getErrors()):
        print(message)
    return True
|
62
|
+
|
63
|
+
def refactor_openplx_file(openplxfile, bundle_path_vec, from_version, to_version) -> bool:
    """Apply every registered migration between two versions to one file.

    Parses *openplxfile* against the bundles in *bundle_path_vec*, collects
    the applicable migrations, and rewrites the file in place. Returns True
    on success, False when parsing/analysis reported errors (in which case
    nothing is written).
    """
    print(f"Migrating {openplxfile} from {from_version} to {to_version}")
    openplx_context = OpenPlxContext(bundle_path_vec)
    migrations = collect_migrations(from_version, to_version)

    documents = parse_and_analyze(openplxfile, openplx_context)

    # Bail out before touching the file if parse/analysis failed.
    if has_errors(openplx_context):
        return False

    print(f"Found {len(migrations)} migrations ", [m.__name__ for m in migrations])

    # Bucket migrations by their declared openplx_order and process the
    # buckets in ascending order; ops from one bucket are written to disk
    # before the next bucket's migrations run.
    order_group = [(key, list(group)) for key, group in itertools.groupby(migrations, lambda m: m.openplx_order)]
    order_group.sort(key=lambda pair: pair[0])

    for _, migration_group in order_group:
        ops = []
        for migration in migration_group:
            ops.extend(migration(documents))

        # NOTE(review): itertools.groupby only groups *consecutive* ops
        # sharing a path — this assumes migrations emit their ops grouped
        # per file; confirm interleaved paths cannot occur.
        for key, op_group in itertools.groupby(ops, lambda op: op.path):
            # Only rewrite the file we were asked to migrate; ops that
            # target other (e.g. imported) documents are skipped here.
            if Path(openplxfile).samefile(key):
                with open(key, 'r', encoding="utf8") as file:
                    lines = file.readlines()
                lines = ReplaceOp.apply_many(op_group, lines)
                with open(key, 'w', encoding="utf8") as file:
                    file.writelines(lines)
    return True
|
91
|
+
|
92
|
+
def run_openplx_migrate(args):
    """Fetch the bundle release matching --from-version, then migrate.

    Downloads the bundles package from PyPI into a temporary directory,
    points the parser at it, and migrates the given file — or, for a
    directory, every *.openplx file in it except config.openplx files.
    """

    # NOTE: Depending on version, we need to change the package_name
    package_name = 'brickbundles'

    # Resolve the artifact URL for the requested bundle version.
    url = download_package_version(package_name, args.from_version)
    if url is None:
        print(f"Could not find the source distribution for {package_name}=={args.from_version}.")
        return

    # Everything happens inside a self-cleaning scratch directory.
    with tempfile.TemporaryDirectory() as tmpdirname:
        tmp_path = str(Path(os.path.realpath(tmpdirname)).absolute())
        print(f"Extracting to temporary directory: {tmp_path}")
        unzip_package(url, tmp_path)
        print(f"Package {package_name}=={args.from_version} extracted to {tmp_path}")
        bundle_path = str((Path(tmp_path) / package_name).absolute())

        print(f'Using bundle path {bundle_path}')
        print(os.listdir(bundle_path))

        bundle_path_vec = StringVector()
        bundle_path_vec.push_back(bundle_path)

        def _migrate_one(path):
            # Per-file delegation; returns the per-file success flag.
            return refactor_openplx_file(path, bundle_path_vec, args.from_version, args.to_version)

        success = True
        if os.path.isdir(args.openplxfile):
            for root, _, filenames in os.walk(args.openplxfile):
                for filename in filenames:
                    # Skip non-OpenPLX files and config.openplx files.
                    if not filename.endswith(".openplx") or filename.endswith("config.openplx"):
                        continue
                    if not _migrate_one(os.path.join(root, filename)):
                        success = False
        else:
            success = _migrate_one(args.openplxfile)

        if success:
            print(f"Refactor from {args.from_version} to {args.to_version} complete!")
        else:
            print(f"Refactor from {args.from_version} to {args.to_version} failed due to errors!")
            print("Note, some files might have been partially migrated.")
|
133
|
+
|
134
|
+
if __name__ == '__main__':
    # CLI entry point: parse known arguments (extras are ignored) and run.
    cli_args, _unknown = parse_args()
    run_openplx_migrate(cli_args)
|
@@ -0,0 +1,14 @@
|
|
1
|
+
"""
|
2
|
+
Migration hint for use by cmdline tools
|
3
|
+
"""
|
4
|
+
from openplx import check_if_migrate_hint_is_justified, __version__
|
5
|
+
|
6
|
+
class Ansi: # pylint: disable=too-few-public-methods # This is basically an enum, but we do not want to use Enum
    """ANSI terminal escape sequences used to colorize console output."""
    WARNING = '\033[93m'  # bright yellow foreground
    ENDC = '\033[0m'  # reset all terminal attributes
|
9
|
+
|
10
|
+
def check_migration_hint(openplxfile, errors):
    """Print a colorized hint when *errors* look like version-upgrade fallout."""
    old_version = check_if_migrate_hint_is_justified(__version__, errors)
    if not old_version:
        return
    hint = (f"{Ansi.WARNING}Dependency errors might be due to upgrade. If so, try running: "
            f"openplx_migrate --from-version {old_version} {openplxfile}{Ansi.ENDC}")
    print(hint)
|
openplx/migrations.py
ADDED
@@ -0,0 +1,521 @@
|
|
1
|
+
"""
|
2
|
+
This module contains migration functions that can be used to refactor code in a backwards compatible way.
|
3
|
+
"""
|
4
|
+
|
5
|
+
import functools
|
6
|
+
import sys
|
7
|
+
import re
|
8
|
+
from pathlib import Path
|
9
|
+
import itertools
|
10
|
+
from abc import ABC, abstractmethod
|
11
|
+
from openplx.Core import RefactorReplaceOp, RefactorToolkit
|
12
|
+
|
13
|
+
|
14
|
+
def migration(from_version: str, to_version: str, order: int = 0):
    """Decorator that registers a function as a migration step.

    Tags the function with the version range it migrates across and an
    ordering key; collect_migrations discovers migrations by scanning for
    these attributes.

    Args:
        from_version: version the migration upgrades from.
        to_version: version the migration upgrades to.
        order: relative ordering among migrations sharing a version.
    """
    def decorator(func):
        # The original wrapped func in a pass-through closure that only
        # forwarded the call; tagging the function directly is externally
        # equivalent and removes a needless call layer.
        func.openplx_from_version = from_version
        func.openplx_to_version = to_version
        func.openplx_order = order
        return func

    return decorator
|
27
|
+
|
28
|
+
|
29
|
+
@migration("0.9.2", "0.9.3")
|
30
|
+
def snakecaseify_methods(documents):
|
31
|
+
ops = []
|
32
|
+
ops.extend(RefactorToolkit.renameMethod(documents, "Math.Vec3.fromXYZ", "from_xyz"))
|
33
|
+
ops.extend(
|
34
|
+
RefactorToolkit.renameMethod(
|
35
|
+
documents, "Math.Vec3.angleBetweenVectors", "angle_between_vectors"
|
36
|
+
)
|
37
|
+
)
|
38
|
+
ops.extend(
|
39
|
+
RefactorToolkit.renameMethod(
|
40
|
+
documents, "Math.Vec3.getOrthogonalUnitVector", "get_orthogonal_unit_vector"
|
41
|
+
)
|
42
|
+
)
|
43
|
+
ops.extend(
|
44
|
+
RefactorToolkit.renameMethod(documents, "Math.Quat.angleAxis", "angle_axis")
|
45
|
+
)
|
46
|
+
ops.extend(RefactorToolkit.renameMethod(documents, "Math.Quat.fromTo", "from_to"))
|
47
|
+
ops.extend(
|
48
|
+
RefactorToolkit.renameMethod(documents, "Math.Quat.fromXYZW", "from_xyzw")
|
49
|
+
)
|
50
|
+
ops.extend(
|
51
|
+
RefactorToolkit.renameMethod(
|
52
|
+
documents, "Math.AffineTransform.fromAxes", "from_axes"
|
53
|
+
)
|
54
|
+
)
|
55
|
+
ops.extend(
|
56
|
+
RefactorToolkit.renameMethod(
|
57
|
+
documents, "Math.AffineTransform.inverseOf", "inverse_of"
|
58
|
+
)
|
59
|
+
)
|
60
|
+
ops.extend(
|
61
|
+
RefactorToolkit.renameMethod(
|
62
|
+
documents, "Math.AffineTransform.transformVec3Point", "transform_vec3_point"
|
63
|
+
)
|
64
|
+
)
|
65
|
+
ops.extend(
|
66
|
+
RefactorToolkit.renameMethod(
|
67
|
+
documents,
|
68
|
+
"Math.AffineTransform.transformVec3Vector",
|
69
|
+
"transform_vec3_vector",
|
70
|
+
)
|
71
|
+
)
|
72
|
+
ops.extend(
|
73
|
+
RefactorToolkit.renameMethod(
|
74
|
+
documents, "Math.Matrix3x3.fromRowMajor", "from_row_major"
|
75
|
+
)
|
76
|
+
)
|
77
|
+
ops.extend(
|
78
|
+
RefactorToolkit.renameMethod(documents, "Math.Matrix3x3.fromRows", "from_rows")
|
79
|
+
)
|
80
|
+
ops.extend(
|
81
|
+
RefactorToolkit.renameMethod(
|
82
|
+
documents, "Math.Matrix3x3.fromColumns", "from_columns"
|
83
|
+
)
|
84
|
+
)
|
85
|
+
ops.extend(
|
86
|
+
RefactorToolkit.renameMethod(
|
87
|
+
documents, "Math.Matrix4x4.fromRowMajor", "from_row_major"
|
88
|
+
)
|
89
|
+
)
|
90
|
+
ops.extend(
|
91
|
+
RefactorToolkit.renameMethod(documents, "Math.Matrix4x4.fromRows", "from_rows")
|
92
|
+
)
|
93
|
+
ops.extend(
|
94
|
+
RefactorToolkit.renameMethod(
|
95
|
+
documents, "Math.Matrix4x4.fromColumns", "from_columns"
|
96
|
+
)
|
97
|
+
)
|
98
|
+
ops.extend(
|
99
|
+
RefactorToolkit.renameMethod(
|
100
|
+
documents, "Math.Matrix4x4.fromVec3Quat", "from_vec3_quat"
|
101
|
+
)
|
102
|
+
)
|
103
|
+
ops.extend(
|
104
|
+
RefactorToolkit.renameMethod(
|
105
|
+
documents, "Math.Matrix4x4.getAffineTranslation", "get_affine_translation"
|
106
|
+
)
|
107
|
+
)
|
108
|
+
ops.extend(
|
109
|
+
RefactorToolkit.renameMethod(
|
110
|
+
documents, "Math.Matrix4x4.getAffineRotation", "get_affine_rotation"
|
111
|
+
)
|
112
|
+
)
|
113
|
+
ops.extend(
|
114
|
+
RefactorToolkit.renameMethod(
|
115
|
+
documents, "Functions.harmonicMean", "harmonic_mean"
|
116
|
+
)
|
117
|
+
)
|
118
|
+
ops.extend(
|
119
|
+
RefactorToolkit.renameMethod(documents, "Math.Line.fromPoints", "from_points")
|
120
|
+
)
|
121
|
+
|
122
|
+
return [ReplaceOp(op) for op in ops]
|
123
|
+
|
124
|
+
|
125
|
+
@migration("0.9.3", "0.10.0")
|
126
|
+
def snakecaseify_methods_093(documents):
|
127
|
+
ops = []
|
128
|
+
# ValueOutputSignal
|
129
|
+
ops.extend(
|
130
|
+
RefactorToolkit.renameMethod(
|
131
|
+
documents, "Physics.Signals.ValueOutputSignal.fromAngle", "from_angle"
|
132
|
+
)
|
133
|
+
)
|
134
|
+
ops.extend(
|
135
|
+
RefactorToolkit.renameMethod(
|
136
|
+
documents,
|
137
|
+
"Physics.Signals.ValueOutputSignal.fromAngularVelocity1D",
|
138
|
+
"from_angular_velocity_1d",
|
139
|
+
)
|
140
|
+
)
|
141
|
+
ops.extend(
|
142
|
+
RefactorToolkit.renameMethod(
|
143
|
+
documents, "Physics.Signals.ValueOutputSignal.fromDistance", "from_distance"
|
144
|
+
)
|
145
|
+
)
|
146
|
+
ops.extend(
|
147
|
+
RefactorToolkit.renameMethod(
|
148
|
+
documents, "Physics.Signals.ValueOutputSignal.fromForce1D", "from_force_1d"
|
149
|
+
)
|
150
|
+
)
|
151
|
+
ops.extend(
|
152
|
+
RefactorToolkit.renameMethod(
|
153
|
+
documents,
|
154
|
+
"Physics.Signals.ValueOutputSignal.fromVelocity1D",
|
155
|
+
"from_velocity_1d",
|
156
|
+
)
|
157
|
+
)
|
158
|
+
ops.extend(
|
159
|
+
RefactorToolkit.renameMethod(
|
160
|
+
documents,
|
161
|
+
"Physics.Signals.ValueOutputSignal.fromTorque1D",
|
162
|
+
"from_torque_1d",
|
163
|
+
)
|
164
|
+
)
|
165
|
+
ops.extend(
|
166
|
+
RefactorToolkit.renameMethod(
|
167
|
+
documents,
|
168
|
+
"Physics.Signals.ValueOutputSignal.fromAcceleration3D",
|
169
|
+
"from_acceleration_3d",
|
170
|
+
)
|
171
|
+
)
|
172
|
+
ops.extend(
|
173
|
+
RefactorToolkit.renameMethod(
|
174
|
+
documents,
|
175
|
+
"Physics.Signals.ValueOutputSignal.fromAngularAcceleration3D",
|
176
|
+
"from_angular_acceleration_3d",
|
177
|
+
)
|
178
|
+
)
|
179
|
+
ops.extend(
|
180
|
+
RefactorToolkit.renameMethod(
|
181
|
+
documents,
|
182
|
+
"Physics.Signals.ValueOutputSignal.fromAngularVelocity3D",
|
183
|
+
"from_angular_velocity_3d",
|
184
|
+
)
|
185
|
+
)
|
186
|
+
ops.extend(
|
187
|
+
RefactorToolkit.renameMethod(
|
188
|
+
documents, "Physics.Signals.ValueOutputSignal.fromForce3D", "from_force_3d"
|
189
|
+
)
|
190
|
+
)
|
191
|
+
ops.extend(
|
192
|
+
RefactorToolkit.renameMethod(
|
193
|
+
documents,
|
194
|
+
"Physics.Signals.ValueOutputSignal.fromTorque3D",
|
195
|
+
"from_torque_3d",
|
196
|
+
)
|
197
|
+
)
|
198
|
+
ops.extend(
|
199
|
+
RefactorToolkit.renameMethod(
|
200
|
+
documents,
|
201
|
+
"Physics.Signals.ValueOutputSignal.fromVelocity3D",
|
202
|
+
"from_velocity_3d",
|
203
|
+
)
|
204
|
+
)
|
205
|
+
ops.extend(
|
206
|
+
RefactorToolkit.renameMethod(
|
207
|
+
documents,
|
208
|
+
"Physics.Signals.ValueOutputSignal.fromPosition3D",
|
209
|
+
"from_position_3d",
|
210
|
+
)
|
211
|
+
)
|
212
|
+
ops.extend(
|
213
|
+
RefactorToolkit.renameMethod(
|
214
|
+
documents, "Physics.Signals.ValueOutputSignal.fromRPY", "from_rpy"
|
215
|
+
)
|
216
|
+
)
|
217
|
+
ops.extend(
|
218
|
+
RefactorToolkit.renameMethod(
|
219
|
+
documents, "Physics.Signals.ValueOutputSignal.isReal", "is_real"
|
220
|
+
)
|
221
|
+
)
|
222
|
+
ops.extend(
|
223
|
+
RefactorToolkit.renameMethod(
|
224
|
+
documents, "Physics.Signals.ValueOutputSignal.asReal", "as_real"
|
225
|
+
)
|
226
|
+
)
|
227
|
+
ops.extend(
|
228
|
+
RefactorToolkit.renameMethod(
|
229
|
+
documents, "Physics.Signals.ValueOutputSignal.asVec3", "as_vec3"
|
230
|
+
)
|
231
|
+
)
|
232
|
+
ops.extend(
|
233
|
+
RefactorToolkit.renameMethod(
|
234
|
+
documents, "Physics.Signals.ValueOutputSignal.asAngle", "as_angle"
|
235
|
+
)
|
236
|
+
)
|
237
|
+
ops.extend(
|
238
|
+
RefactorToolkit.renameMethod(
|
239
|
+
documents,
|
240
|
+
"Physics.Signals.ValueOutputSignal.asAngularVelocity1D",
|
241
|
+
"as_angular_velocity_1d",
|
242
|
+
)
|
243
|
+
)
|
244
|
+
ops.extend(
|
245
|
+
RefactorToolkit.renameMethod(
|
246
|
+
documents, "Physics.Signals.ValueOutputSignal.asDistance", "as_distance"
|
247
|
+
)
|
248
|
+
)
|
249
|
+
ops.extend(
|
250
|
+
RefactorToolkit.renameMethod(
|
251
|
+
documents, "Physics.Signals.ValueOutputSignal.asForce1D", "as_force_1d"
|
252
|
+
)
|
253
|
+
)
|
254
|
+
ops.extend(
|
255
|
+
RefactorToolkit.renameMethod(
|
256
|
+
documents,
|
257
|
+
"Physics.Signals.ValueOutputSignal.asVelocity1D",
|
258
|
+
"as_velocity_1d",
|
259
|
+
)
|
260
|
+
)
|
261
|
+
ops.extend(
|
262
|
+
RefactorToolkit.renameMethod(
|
263
|
+
documents, "Physics.Signals.ValueOutputSignal.asTorque1D", "as_torque_1d"
|
264
|
+
)
|
265
|
+
)
|
266
|
+
ops.extend(
|
267
|
+
RefactorToolkit.renameMethod(
|
268
|
+
documents,
|
269
|
+
"Physics.Signals.ValueOutputSignal.asAcceleration3D",
|
270
|
+
"as_acceleration_3d",
|
271
|
+
)
|
272
|
+
)
|
273
|
+
ops.extend(
|
274
|
+
RefactorToolkit.renameMethod(
|
275
|
+
documents,
|
276
|
+
"Physics.Signals.ValueOutputSignal.asAngularAcceleration3D",
|
277
|
+
"as_angular_acceleration_3d",
|
278
|
+
)
|
279
|
+
)
|
280
|
+
ops.extend(
|
281
|
+
RefactorToolkit.renameMethod(
|
282
|
+
documents,
|
283
|
+
"Physics.Signals.ValueOutputSignal.asAngularVelocity3D",
|
284
|
+
"as_angular_velocity_3d",
|
285
|
+
)
|
286
|
+
)
|
287
|
+
ops.extend(
|
288
|
+
RefactorToolkit.renameMethod(
|
289
|
+
documents, "Physics.Signals.ValueOutputSignal.asForce3D", "as_force_3d"
|
290
|
+
)
|
291
|
+
)
|
292
|
+
ops.extend(
|
293
|
+
RefactorToolkit.renameMethod(
|
294
|
+
documents, "Physics.Signals.ValueOutputSignal.asTorque3D", "as_torque_3d"
|
295
|
+
)
|
296
|
+
)
|
297
|
+
ops.extend(
|
298
|
+
RefactorToolkit.renameMethod(
|
299
|
+
documents,
|
300
|
+
"Physics.Signals.ValueOutputSignal.asVelocity3D",
|
301
|
+
"as_velocity_3d",
|
302
|
+
)
|
303
|
+
)
|
304
|
+
ops.extend(
|
305
|
+
RefactorToolkit.renameMethod(
|
306
|
+
documents,
|
307
|
+
"Physics.Signals.ValueOutputSignal.asPosition3D",
|
308
|
+
"as_position_3d",
|
309
|
+
)
|
310
|
+
)
|
311
|
+
ops.extend(
|
312
|
+
RefactorToolkit.renameMethod(
|
313
|
+
documents, "Robotics.Signals.RobotInputSignal.fromValues", "from_values"
|
314
|
+
)
|
315
|
+
)
|
316
|
+
|
317
|
+
# DriveTrain.Signals
|
318
|
+
ops.extend(
|
319
|
+
RefactorToolkit.moveAndRenameModel(
|
320
|
+
documents,
|
321
|
+
"DriveTrain.Signals.CombustionEngineThrottleInput",
|
322
|
+
"Physics.Signals",
|
323
|
+
"FractionInput",
|
324
|
+
)
|
325
|
+
)
|
326
|
+
ops.extend(
|
327
|
+
RefactorToolkit.moveAndRenameModel(
|
328
|
+
documents,
|
329
|
+
"DriveTrain.Signals.CombustionEngineTorqueOutput",
|
330
|
+
"Physics.Signals",
|
331
|
+
"Torque1DOutput",
|
332
|
+
)
|
333
|
+
)
|
334
|
+
ops.extend(
|
335
|
+
RefactorToolkit.moveAndRenameModel(
|
336
|
+
documents,
|
337
|
+
"DriveTrain.Signals.GearTorqueOutput",
|
338
|
+
"Physics.Signals",
|
339
|
+
"Torque1DOutput",
|
340
|
+
)
|
341
|
+
)
|
342
|
+
|
343
|
+
return [ReplaceOp(op) for op in ops]
|
344
|
+
|
345
|
+
|
346
|
+
@migration("0.10.3", "0.11.0")
|
347
|
+
def migrations_for_0_10_3(documents):
|
348
|
+
ops = []
|
349
|
+
# Physics3D.Transform -> Math.AffineTransform
|
350
|
+
ops.extend(
|
351
|
+
RefactorToolkit.moveAndRenameModel(
|
352
|
+
documents, "Physics3D.Transform", "Math", "AffineTransform"
|
353
|
+
)
|
354
|
+
)
|
355
|
+
# Robotics.Joints.FlexibleVelocityJoint -> Robotics.Joints.FlexibleAngularVelocityJoint
|
356
|
+
ops.extend(
|
357
|
+
RefactorToolkit.renameModel(
|
358
|
+
documents,
|
359
|
+
"Robotics.Joints.FlexibleVelocityJoint",
|
360
|
+
"FlexibleAngularVelocityJoint",
|
361
|
+
)
|
362
|
+
)
|
363
|
+
ops.extend(
|
364
|
+
RefactorToolkit.renameModel(
|
365
|
+
documents,
|
366
|
+
"Physics3D.Signals.MateConnector.Velocity3DOutput",
|
367
|
+
"LinearVelocity3DOutput",
|
368
|
+
)
|
369
|
+
)
|
370
|
+
return [ReplaceOp(op) for op in ops]
|
371
|
+
|
372
|
+
|
373
|
+
@migration("0.11.0", "0.11.1")
|
374
|
+
def migrations_for_0_11_0(documents):
|
375
|
+
ops = []
|
376
|
+
ops.extend(
|
377
|
+
RefactorToolkit.renameModel(
|
378
|
+
documents,
|
379
|
+
"Physics3D.Signals.RigidBodyVelocityOutput",
|
380
|
+
"LinearVelocity3DOutput",
|
381
|
+
)
|
382
|
+
)
|
383
|
+
ops.extend(
|
384
|
+
RefactorToolkit.renameAttribute(
|
385
|
+
documents, "Physics3D.Signals.RigidBodyVelocityOutput.rigid_body", "source"
|
386
|
+
)
|
387
|
+
)
|
388
|
+
return [ReplaceOp(op) for op in ops]
|
389
|
+
|
390
|
+
|
391
|
+
@migration("0.12.1", "0.12.2")
|
392
|
+
def migrations_for_0_12_1(documents):
|
393
|
+
ops = []
|
394
|
+
ops.extend(
|
395
|
+
RefactorToolkit.moveAndRenameModel(
|
396
|
+
documents,
|
397
|
+
"Physics3D.Signals.PrismaticVelocityOutput",
|
398
|
+
"Physics.Signals",
|
399
|
+
"LinearVelocity1DOutput",
|
400
|
+
)
|
401
|
+
)
|
402
|
+
ops.extend(
|
403
|
+
RefactorToolkit.renameAttribute(
|
404
|
+
documents, "Physics3D.Signals.PrismaticVelocityOutput.prismatic", "source"
|
405
|
+
)
|
406
|
+
)
|
407
|
+
ops.extend(
|
408
|
+
RefactorToolkit.renameAttribute(
|
409
|
+
documents,
|
410
|
+
"DriveTrain.ManualClutch.initial_opening_fraction",
|
411
|
+
"initial_engagement_fraction",
|
412
|
+
)
|
413
|
+
)
|
414
|
+
ops.extend(
|
415
|
+
RefactorToolkit.renameAttribute(
|
416
|
+
documents,
|
417
|
+
"DriveTrain.ManualClutch.opening_fraction_input",
|
418
|
+
"engagement_fraction_input",
|
419
|
+
)
|
420
|
+
)
|
421
|
+
ops.extend(
|
422
|
+
RefactorToolkit.renameAttribute(
|
423
|
+
documents,
|
424
|
+
"DriveTrain.ManualClutch.opening_fraction_output",
|
425
|
+
"engagement_fraction_output",
|
426
|
+
)
|
427
|
+
)
|
428
|
+
ops.extend(
|
429
|
+
RefactorToolkit.renameAttribute(
|
430
|
+
documents,
|
431
|
+
"DriveTrain.AutomaticClutch.engage_time",
|
432
|
+
"engagement_time",
|
433
|
+
)
|
434
|
+
)
|
435
|
+
|
436
|
+
return [ReplaceOp(op) for op in ops]
|
437
|
+
|
438
|
+
|
439
|
+
@migration("0.13.1", "0.13.2")
|
440
|
+
def migrations_for_0_13_1(documents):
|
441
|
+
ops = []
|
442
|
+
# DriveTrain.Gear -> DriveTrain.FlexibleGear
|
443
|
+
ops.extend(
|
444
|
+
RefactorToolkit.renameModel(
|
445
|
+
documents,
|
446
|
+
"DriveTrain.Gear",
|
447
|
+
"FlexibleGear",
|
448
|
+
)
|
449
|
+
)
|
450
|
+
|
451
|
+
return [ReplaceOp(op) for op in ops]
|
452
|
+
|
453
|
+
|
454
|
+
def split_version(v):
    """Parse a version string into a comparable (major, minor, patch) tuple.

    Trailing suffixes after the third component (e.g. "1.2.3rc1") are
    ignored. Raises ValueError when the string does not start with three
    dot-separated integers.
    """
    m = re.match(r"^(\d+)\.(\d+)\.(\d+)", v)
    if m is None:
        raise ValueError("Invalid version format")
    major, minor, patch = (int(component) for component in m.groups())
    return (major, minor, patch)
|
461
|
+
|
462
|
+
|
463
|
+
def collect_migrations(from_version, to_version):
    """Collect the registered migrations needed to go between two versions.

    A function qualifies when it carries the attributes set by the
    @migration decorator and its own from-version lies in the half-open
    range [from_version, to_version). The result is sorted by from-version
    so migrations apply oldest-first.
    """
    def _applies(obj):
        # The hasattr checks double as the "is a migration" test, making the
        # scan over arbitrary module globals safe; they must short-circuit
        # before the version comparison.
        return (
            callable(obj)
            and hasattr(obj, "openplx_from_version")
            and hasattr(obj, "openplx_to_version")
            and hasattr(obj, "openplx_order")
            and split_version(from_version)
            <= split_version(obj.openplx_from_version)
            < split_version(to_version)
        )

    # Scan module globals so anything decorated with @migration is found
    # automatically — no explicit registry needed. (The original iterated
    # globals().items() and discarded the keys; values() is what's used.)
    candidates = [obj for obj in globals().values() if _applies(obj)]
    return sorted(candidates, key=lambda m: split_version(m.openplx_from_version))
|
477
|
+
|
478
|
+
|
479
|
+
class MigrateOp(ABC): # pylint: disable=R0903 # Too few public methods
    """Abstract base for a single text-rewriting migration operation."""

    @abstractmethod
    def apply_to(self, lines, offset):
        """Rewrite *lines* with this operation, columns shifted by *offset*."""
|
483
|
+
|
484
|
+
|
485
|
+
class ReplaceOp(MigrateOp):
    """A single-line text replacement derived from a RefactorReplaceOp.

    Line and column positions appear to be 1-based (note the ``- 1``
    adjustments in apply_to), with the end column exclusive.
    """

    def __init__(self, op: RefactorReplaceOp):
        # Copy fields off the refactor op into plain attributes.
        self.path = Path(op.source_id)  # file the op applies to
        self.from_line = op.from_line  # start line (1-based)
        self.from_column = op.from_column  # start column (1-based, inclusive)
        self.end_line = op.end_line  # end line; must equal from_line in apply_to
        self.end_column = op.end_column  # end column (1-based, exclusive)
        self.new_content = op.new_content  # replacement text

    def __str__(self):
        return f"{{{self.path}, {self.from_line}, {self.from_column}, {self.end_line}, {self.end_column}, {self.new_content} }}"

    @staticmethod
    def apply_many(ops, lines):
        # There can be multiple ops per line, we need to run them in order sorted by column
        for _, line_group in itertools.groupby(ops, lambda op: op.from_line):
            offset = 0
            for op in sorted(line_group, key=lambda op: op.from_column):
                lines = op.apply_to(lines, offset)
                # Track how much this replacement grew/shrank the line so
                # later ops on the same line land at the shifted columns.
                offset += len(op.new_content) - op.end_column + op.from_column
        return lines

    def apply_to(self, lines, offset):
        """Apply this op to *lines* (mutated in place), with columns
        shifted right by *offset*.

        Returns the mutated list, or [] when the op spans multiple lines
        (unsupported; an error is printed to stderr).
        """
        if self.end_line != self.from_line:
            print("Multiple line replace ops are not supported", file=sys.stderr)
            return []

        target_line = lines[self.from_line - 1]

        # Splice new_content over [from_column, end_column) of the target
        # line, converting the 1-based columns to 0-based indices.
        lines[self.from_line - 1] = (
            target_line[: (self.from_column + offset - 1)]
            + self.new_content
            + target_line[(self.end_column + offset - 1) :]
        )

        return lines
|