kuavo-humanoid-sdk 1.1.3a1240__py3-none-any.whl → 1.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -0,0 +1,2 @@
+ from .kuavo_strategy import *
+ from .grasp_box.grasp_box_strategy import *
@@ -0,0 +1,418 @@
+ import time
+ import math
+ from kuavo_humanoid_sdk.kuavo_strategy.kuavo_strategy import KuavoRobotStrategyBase
+ from kuavo_humanoid_sdk.interfaces.data_types import KuavoPose
+ from kuavo_humanoid_sdk.interfaces.data_types import KuavoManipulationMpcFrame, KuavoManipulationMpcCtrlMode, KuavoManipulationMpcControlFlow
+ from kuavo_humanoid_sdk.interfaces.data_types import EndEffectorSide
+ from kuavo_humanoid_sdk.interfaces.data_types import AprilTagData, HomogeneousMatrix, PoseQuaternion
+ from kuavo_humanoid_sdk import KuavoRobot, KuavoRobotState, KuavoRobotTools, KuavoRobotVision
+ from dataclasses import dataclass
+ from typing import Tuple
+ import numpy as np
+ from scipy.spatial.transform import Rotation as R
+
+
+ @dataclass
+ class BoxInfo:
+     """Box information data class.
+
+     Describes the box's pose, size, and mass, used by the box grasping strategy.
+
+     Attributes:
+         pose (KuavoPose): Pose of the box
+         size (Tuple[float, float, float]): Box dimensions (length, width, height) in meters
+         mass (float): Box mass in kilograms
+     """
+     pose: KuavoPose
+     size: Tuple[float, float, float] = (0.3, 0.2, 0.15)  # default box size
+     mass: float = 1.0  # default box mass (kg)
+
+ class KuavoGraspBox(KuavoRobotStrategyBase):
+     """Box grasping strategy class, derived from the base strategy class."""
+
+     def __init__(self, robot:KuavoRobot, robot_state:KuavoRobotState, robot_tools:KuavoRobotTools, robot_vision:KuavoRobotVision):
+         """Initialize the box grasping strategy class.
+
+         Args:
+             robot: KuavoRobot instance
+             robot_state: KuavoRobotState instance
+             robot_tools: KuavoRobotTools instance
+             robot_vision: KuavoRobotVision instance
+         """
+         super().__init__(robot, robot_state, robot_tools, robot_vision)
+
+         # Configuration parameters for box grasping
+         self.search_timeout = 20.0  # search timeout (seconds)
+         self.approach_timeout = 30.0  # approach timeout (seconds)
+         self.grasp_height_offset = 0.1  # grasp height offset (meters)
+
+         # Target for the head AprilTag search, initialized with the sentinel ID 9999
+         self.head_find_target_current_info_pose = AprilTagData(
+             id=[9999],  # sentinel ID
+             size=[0.0],  # default size 0
+             pose=[PoseQuaternion(
+                 position=(0.0, 0.0, 0.0),  # default zero position
+                 orientation=(0.0, 0.0, 0.0, 1.0)  # default orientation (no rotation)
+             )]
+         )
+
+     def head_find_target(self, target_info:AprilTagData, max_search_time=None, search_pattern="rotate_head", **kwargs):
+         """Search for an AprilTag target by rotating the head.
+
+         Args:
+             target_info: AprilTag information
+             max_search_time: Maximum search time (seconds); the default is used if None
+             search_pattern: Search mode, "rotate_head" or "rotate_body"
+
+         Returns:
+             bool: Whether the target was found
+
+         Logic:
+             1. Check whether the target position lies within the robot's FOV (70-degree field of view)
+             2. If it is outside the FOV and search_pattern is "rotate_body", first rotate the robot body toward the target position
+             3. In either case, run the head search pattern to try to find the target
+             4. Once apriltag_data_from_odom is available, start head tracking immediately
+         """
+         # Initial target assignment
+         self.head_find_target_current_info_pose = target_info
+
+         # Set the search timeout
+         if max_search_time is None:
+             max_search_time = self.search_timeout
+
+         # Get the ID of the target to track
+         target_id = target_info.id[0]
+
+         # Check whether the target position is inside the FOV
+         if len(target_info.pose) > 0:
+             target_position = target_info.pose[0].position
+             robot_position = self.state.robot_position()
+             robot_orientation = self.state.robot_orientation()
+
+             # Compute the target position vector relative to the robot
+             dx = target_position[0] - robot_position[0]
+             dy = target_position[1] - robot_position[1]
+
+             # Compute the target angle relative to the robot
+             target_angle = math.atan2(dy, dx)
+
+             # Get the robot's current yaw angle
+             robot_yaw = self._extract_yaw_from_quaternion(robot_orientation)
+
+             # Compute the angular difference between the target and the robot heading
+             angle_diff = target_angle - robot_yaw
+             # Normalize the angle to [-pi, pi]
+             while angle_diff > math.pi:
+                 angle_diff -= 2 * math.pi
+             while angle_diff < -math.pi:
+                 angle_diff += 2 * math.pi
+
+             # Check whether it is inside the FOV (70 degrees ≈ 1.22 rad)
+             FOV_HALF_ANGLE = math.radians(35)  # 70 degrees / 2 = 35 degrees
+             is_in_fov = abs(angle_diff) <= FOV_HALF_ANGLE
+
+             print(f"Target position: ({target_position[0]:.2f}, {target_position[1]:.2f})")
+             print(f"Robot position: ({robot_position[0]:.2f}, {robot_position[1]:.2f})")
+             print(f"Target angle: {math.degrees(target_angle):.2f} deg")
+             print(f"Robot heading: {math.degrees(robot_yaw):.2f} deg")
+             print(f"Angle difference: {math.degrees(angle_diff):.2f} deg")
+             print(f"Inside FOV: {is_in_fov}")
+
+             # If the target is outside the FOV and the mode allows body rotation, rotate the robot body first
+             if not is_in_fov and search_pattern == "rotate_body":
+                 print("Target outside FOV, adjusting robot heading...")
+                 # Adjust the robot heading
+                 print(f"Adjusting - robot position: {robot_position}")
+                 print(f"Adjusting - target angle: {math.degrees(target_angle):.2f} deg")
+                 print(f"Adjusting - target angle (rad): {target_angle}")
+                 self.robot.control_command_pose_world(
+                     robot_position[0],  # keep the robot's current x position
+                     robot_position[1],  # keep the robot's current y position
+                     0.0,  # keep the current z height
+                     target_angle  # face the target position (radians)
+                 )
+
+                 # Wait for the robot to finish rotating, using a closed-loop check instead of a fixed delay
+                 self._wait_for_orientation(target_angle, max_wait_time=10.0, angle_threshold=0.1)
+
+         # Start the search timer
+         start_time = time.time()
+         found_target = False
+
+         # Run the head search pattern regardless of search_pattern
+         # Head search parameters (degrees)
+         pitch_angles_deg = [12, -12]  # two pitch levels: look up and look down, in degrees
+         yaw_angles_deg = [-30, -15, 0, 15, 30]  # yaw angles for the left-right sweep, in degrees
+
+         # Search in a loop until the timeout
+         while time.time() - start_time < max_search_time and not found_target:
+             # Iterate over the two pitch levels
+             for pitch_deg in pitch_angles_deg:
+                 # Iterate over the yaw angles for the left-right sweep
+                 for yaw_deg in yaw_angles_deg:
+                     # Convert degrees to radians
+                     yaw_rad = yaw_deg * 0.0174533  # degrees to radians, math.pi/180
+                     pitch_rad = pitch_deg * 0.0174533  # degrees to radians
+
+                     # Command the head rotation (radians)
+                     self.robot.control_head(yaw=yaw_rad, pitch=pitch_rad)
+                     # Wait for the head to reach the commanded angles
+                     time.sleep(0.5)
+
+                     # Check whether the target has been found
+                     target_data = self.vision.get_data_by_id_from_odom(target_id)
+                     print(f"target_data: {target_data}")
+
+                     if (target_data is not None and isinstance(target_data, dict) and
+                             'poses' in target_data and len(target_data['poses']) > 0):
+                         print(f"Target AprilTag {target_id} found!")
+                         found_target = True
+                         # Start head tracking
+                         print("---- Starting head tracking ---- ")
+                         self.robot.enable_head_tracking(target_id)  # self.robot.disable_head_tracking()
+                         break
+
+                 if found_target:
+                     break
+
+         return found_target
+
+     def _is_orientation_aligned(self, orientation1, orientation2, threshold=0.3):
+         """Check whether two orientations are roughly aligned.
+
+         Args:
+             orientation1: Quaternion of the first orientation
+             orientation2: Quaternion of the second orientation
+             threshold: Threshold below which the orientations are considered aligned
+
+         Returns:
+             bool: Whether the orientations are aligned
+         """
+         # Simplified implementation; a full version would compare the quaternions directly
+         # Extract the yaw of both orientations and compare the difference
+         yaw1 = self._extract_yaw_from_quaternion(orientation1)
+         yaw2 = self._extract_yaw_from_quaternion(orientation2)
+
+         # Compute the angular difference
+         diff = abs(yaw1 - yaw2)
+         while diff > math.pi:
+             diff -= 2 * math.pi
+
+         return abs(diff) < threshold
+
+     def _extract_yaw_from_quaternion(self, quaternion):
+         """Extract the yaw angle from a quaternion.
+
+         Args:
+             quaternion: Quaternion (x, y, z, w)
+
+         Returns:
+             float: Yaw angle (radians)
+         """
+         # Compute the yaw angle (rotation about the z axis)
+         # Simplified quaternion-to-Euler conversion that extracts only the yaw
+         x, y, z, w = quaternion
+         yaw = math.atan2(2.0 * (w * z + x * y), 1.0 - 2.0 * (y * y + z * z))
+         return yaw
+
+     def _track_target_with_head(self, target_data):
+         """Track the target with the head.
+
+         Args:
+             target_data: Target data containing position information
+         """
+         # Extract the relative position from the target data
+         position = target_data["position"]
+
+         # Compute the direction of the target relative to the robot
+         dx = position[0]
+         dy = position[1]
+         dz = position[2]
+
+         # Compute the yaw and pitch angles that point at the target
+         # Simple arctangent computation (results in radians)
+         yaw_rad = math.atan2(dy, dx)
+         distance = math.sqrt(dx*dx + dy*dy)
+         pitch_rad = math.atan2(dz, distance)
+
+         # Limit the angle ranges (radians)
+         yaw_rad = min(math.radians(80), max(math.radians(-80), yaw_rad))  # limit to ±80 degrees
+         pitch_rad = min(math.radians(25), max(math.radians(-25), pitch_rad))  # limit to ±25 degrees
+
+         # Command the head to point at the target (radians)
+         self.robot.control_head(yaw=yaw_rad, pitch=pitch_rad)
+
+     def walk_approach_target(self, target_info:AprilTagData, target_distance=0.5, approach_speed=0.15, **kwargs):
+         """Walk toward an AprilTag target.
+
+         Args:
+             target_info: AprilTag information
+             target_distance: Desired distance to the target (meters)
+             approach_speed: Approach speed (m/s)
+
+         Returns:
+             bool: Whether the target was approached successfully
+         """
+         approach_success = False
+         start_time = time.time()
+         tag_id = target_info.id[0]
+         target_data = self.vision.get_data_by_id_from_odom(tag_id)
+         if target_data is None:
+             print(f"No target data found for target ID {tag_id}!")
+             return False
+         target_pose = target_data["poses"][0]
+         print(f"target_pose in _approach_target: {target_pose}")
+         while not approach_success:
+             approach_success = self._approach_target(target_pose, target_distance, approach_speed, **kwargs)
+             time.sleep(1)
+             time_cost = time.time() - start_time
+             print(f"walking approach target..., time_cost: {time_cost:.2f} s.")
+         return approach_success
+
+     def _approach_target(self, target_pose, target_distance=0.5, approach_speed=0.15, **kwargs):
+         """Compute the goal pose from the target information and the desired distance.
+
+         Args:
+             target_pose: Target pose information
+             target_distance: Desired distance to the target (meters)
+             approach_speed: Approach speed (m/s)
+
+         Returns:
+             bool: Whether the target was approached successfully
+         """
+         p_wa = np.array([target_pose.position.x, target_pose.position.y, target_pose.position.z])
+         quat_wa = np.array([target_pose.orientation.x, target_pose.orientation.y, target_pose.orientation.z, target_pose.orientation.w])  # x, y, z, w
+         R_wa = R.from_quat(quat_wa).as_matrix()
+         def get_target_pose_by_distance(p_wa, R_wa, target_distance=0.5):
+             """Compute the goal pose from the target information and the desired distance."""
+             p_at = np.array([0, 0, target_distance], np.float32)
+             p_at_w = R_wa @ p_at
+             p_wt = p_wa + p_at_w
+             yaw = math.atan2(p_at_w[1], p_at_w[0])
+             yaw += math.pi
+             while yaw > math.pi:
+                 yaw -= 2 * math.pi
+             while yaw < -math.pi:
+                 yaw += 2 * math.pi
+             return p_wt, yaw
+
+         p_wt, angle = get_target_pose_by_distance(p_wa, R_wa, target_distance)
+         self.robot.control_command_pose_world(p_wt[0], p_wt[1], 0, angle)
+
+         yaw_reached = self._yaw_check(angle)
+         pos_reached = self._pos_check(p_wt)
+         stance_check = (self.state == 'stance')
+         print(f"yaw_reached: {yaw_reached}, pos_reached: {pos_reached}, stance_check: {stance_check}")
+         return yaw_reached and pos_reached  # and stance_check
+
+     def arm_move_to_target(self, target_pose:KuavoPose, approach_speed=0.15, **kwargs):
+         """Move the arm to the target pose.
+
+         Args:
+             target_pose: Target pose, either a KuavoPose object or a dict containing a position
+             approach_speed: Approach speed (m/s)
+
+         Returns:
+             bool: Whether the arm reached the target pose
+         """
+         return True
+
+     def arm_transport_target_up(self, target_info:BoxInfo, arm_mode="manipulation_mpc"):
+         """Lift the box with the arms.
+
+         Args:
+             target_info: Target information, including pose and size
+             arm_mode: Arm control mode
+
+         Returns:
+             bool: Whether the target was lifted successfully
+         """
+         return True
+
+     def arm_transport_target_down(self, target_info:BoxInfo, arm_mode="manipulation_mpc"):
+         """Put the box down with the arms.
+
+         Args:
+             target_info: Target placement information
+             arm_mode: Arm control mode
+
+         Returns:
+             bool: Whether the target was put down successfully
+         """
+         return True
+
+     def _wait_for_orientation(self, target_angle, max_wait_time=10.0, angle_threshold=0.1):
+         """Wait for the robot to rotate to the given heading.
+
+         Args:
+             target_angle: Target heading angle (radians)
+             max_wait_time: Maximum wait time (seconds)
+             angle_threshold: Angle threshold (radians) below which the heading is considered reached
+
+         Returns:
+             bool: Whether the target heading was reached
+         """
+         start_time = time.time()
+         rate_hz = 10  # check frequency
+         wait_interval = 1.0 / rate_hz
+
+         print(f"Waiting for the robot to finish rotating, target angle: {math.degrees(target_angle):.2f} deg")
+
+         while time.time() - start_time < max_wait_time:
+             if self._yaw_check(target_angle, angle_threshold):
+                 return True
+
+             # Wait briefly before checking again
+             time.sleep(wait_interval)
+
+         # Timed out
+         print(f"Timed out waiting for the robot to rotate! Waited {max_wait_time:.2f} s")
+         return False
+
+     def _yaw_check(self, yaw_angle_target, angle_threshold=0.1):
+         """Check the difference between the robot's current heading and the target heading.
+
+         Args:
+             yaw_angle_target: Target heading angle (radians)
+             angle_threshold: Angle threshold (radians) below which the heading is considered reached
+
+         Returns:
+             bool: Whether the target heading was reached
+         """
+         # Get the robot's current orientation
+         current_orientation = self.state.robot_orientation()
+         current_yaw = self._extract_yaw_from_quaternion(current_orientation)
+
+         # Compute the angle difference
+         angle_diff = yaw_angle_target - current_yaw
+         # Normalize to [-pi, pi]
+         while angle_diff > math.pi:
+             angle_diff -= 2 * math.pi
+         while angle_diff < -math.pi:
+             angle_diff += 2 * math.pi
+
+         # Report the current status
+         print(f"Current heading: {math.degrees(current_yaw):.2f} deg, target heading: {math.degrees(yaw_angle_target):.2f} deg, difference: {math.degrees(abs(angle_diff)):.2f} deg")
+
+         # Check whether the heading has been reached
+         if abs(angle_diff) < angle_threshold:
+             print("Robot rotation complete!")
+             return True
+         else:
+             return False
+
+     def _pos_check(self, pos_target, pos_threshold=0.2):
+         """Check the difference between the robot's current (x, y) position and the target position.
+
+         Args:
+             pos_target: Target position
+             pos_threshold: Position threshold (meters) below which the position is considered reached
+         """
+         current_pos = self.state.robot_position()
+         # print(f"current_pos: {current_pos}, pos_target: {pos_target}")
+         pos_diff = np.linalg.norm(np.array(pos_target[:2]) - np.array(current_pos[:2]))
+         print(f"Current position (x, y): ({current_pos[0]:.2f}, {current_pos[1]:.2f}), target position (x, y): ({pos_target[0]:.2f}, {pos_target[1]:.2f}), distance: {pos_diff:.2f} m")
+         if pos_diff < pos_threshold:
+             print("Robot reached the target position!")
+             return True
+         else:
+             return False
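
A minimal usage sketch of the KuavoGraspBox strategy added above, calling only methods that appear in this diff; the zero-argument constructors, the tag ID and pose values, and any prior SDK/ROS initialization are assumptions about the surrounding SDK rather than something this diff defines.

# Hypothetical usage sketch (not part of the package): search for a tag, then walk to a standoff distance.
from kuavo_humanoid_sdk import KuavoRobot, KuavoRobotState, KuavoRobotTools, KuavoRobotVision
from kuavo_humanoid_sdk.interfaces.data_types import AprilTagData, PoseQuaternion
from kuavo_humanoid_sdk.kuavo_strategy.grasp_box.grasp_box_strategy import KuavoGraspBox

# Assumes the SDK / ROS node has already been initialized elsewhere.
grasp = KuavoGraspBox(KuavoRobot(), KuavoRobotState(), KuavoRobotTools(), KuavoRobotVision())

# Rough prior of where tag 0 should be, mirroring the AprilTagData layout used in the strategy above.
tag_hint = AprilTagData(
    id=[0],
    size=[0.1],
    pose=[PoseQuaternion(position=(1.5, 0.0, 0.8), orientation=(0.0, 0.0, 0.0, 1.0))],
)

if grasp.head_find_target(tag_hint, search_pattern="rotate_body"):
    grasp.walk_approach_target(tag_hint, target_distance=0.5)
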
@@ -0,0 +1,63 @@
+ from abc import ABC, abstractmethod
+ from kuavo_humanoid_sdk import KuavoSDK, KuavoRobot
+ from kuavo_humanoid_sdk import KuavoRobotState
+ from kuavo_humanoid_sdk import KuavoRobotTools
+ from kuavo_humanoid_sdk import KuavoRobotVision
+
+ class KuavoRobotStrategyBase(ABC):
+     """Base strategy class for the Kuavo robot, providing the abstract interface for strategy execution."""
+
+     def __init__(self, robot:KuavoRobot, robot_state:KuavoRobotState, robot_tools:KuavoRobotTools, robot_vision:KuavoRobotVision):
+         """Initialize the base strategy class.
+
+         Args:
+             robot: KuavoRobot instance providing robot control
+             robot_state: KuavoRobotState instance providing robot state information
+             robot_tools: KuavoRobotTools instance providing coordinate transforms and other utilities
+             robot_vision: KuavoRobotVision instance providing visual perception
+         """
+         self.robot = robot
+         self.state = robot_state
+         self.tools = robot_tools
+         self.vision = robot_vision
+
+     @abstractmethod
+     def head_find_target(self, target_info, **kwargs):
+         """Find a target with a specific ID.
+
+         Args:
+             target_info: Identifying information of the target
+             **kwargs: Additional parameters
+
+         Returns:
+             bool: Whether the target was found
+         """
+         pass
+
+     @abstractmethod
+     def walk_approach_target(self, target_info, target_distance=0.5, **kwargs):
+         """Walk toward a specific target until the given distance is reached.
+
+         Args:
+             target_info: Identifying information of the target
+             target_distance: Desired distance to the target (meters)
+             **kwargs: Additional parameters
+
+         Returns:
+             bool: Whether the target was approached successfully
+         """
+         pass
+
+     @abstractmethod
+     def arm_move_to_target(self, target_pose, **kwargs):
+         """Move the arm to a specific pose (closed loop).
+
+         Args:
+             target_pose: Target pose
+             **kwargs: Additional parameters
+
+         Returns:
+             bool: Whether the arm reached the target pose
+         """
+         pass
+
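
A short sketch of how a concrete strategy could plug into this base class; the subclass name and method bodies below are hypothetical placeholders, and only the members declared above (self.vision, self.robot, the three abstract methods) are taken from this diff.

# Hypothetical subclass sketch (illustrative only): a concrete strategy must
# implement the three abstract methods declared by KuavoRobotStrategyBase.
class MyShelfPickStrategy(KuavoRobotStrategyBase):
    def head_find_target(self, target_info, **kwargs):
        # e.g. query self.vision for the tag and report whether it has been seen
        return self.vision.get_data_by_id_from_odom(target_info.id[0]) is not None

    def walk_approach_target(self, target_info, target_distance=0.5, **kwargs):
        # e.g. send self.robot.control_command_pose_world(...) toward a standoff pose
        return True

    def arm_move_to_target(self, target_pose, **kwargs):
        # e.g. closed-loop arm motion toward target_pose
        return True
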
@@ -10,7 +10,7 @@ import kuavo_msgs.msg
  import std_msgs.msg

  class twoArmHandPoseCmd(genpy.Message):
- _md5sum = "d4b6792a6f960bea428fd7158220110b"
+ _md5sum = "cd5f0a3dc4154eb55aff1c874e2dc81e"
  _type = "kuavo_msgs/twoArmHandPoseCmd"
  _has_header = False # flag to mark the presence of a Header object
  _full_text = """twoArmHandPose hand_poses
@@ -18,6 +18,7 @@ class twoArmHandPoseCmd(genpy.Message):
  bool use_custom_ik_param
  bool joint_angles_as_q0
  ikSolveParam ik_param
+ int32 frame # 0 keep current frame 1 world frame (based on odom) 2 local frame 3 manipulation world frame
  ================================================================================
  MSG: kuavo_msgs/twoArmHandPose
  Header header
@@ -58,8 +59,8 @@ float64 major_iterations_limit
  float64 oritation_constraint_tol
  float64 pos_constraint_tol # work when pos_cost_weight > 0.0
  float64 pos_cost_weight"""
- __slots__ = ['hand_poses','use_custom_ik_param','joint_angles_as_q0','ik_param']
- _slot_types = ['kuavo_msgs/twoArmHandPose','bool','bool','kuavo_msgs/ikSolveParam']
+ __slots__ = ['hand_poses','use_custom_ik_param','joint_angles_as_q0','ik_param','frame']
+ _slot_types = ['kuavo_msgs/twoArmHandPose','bool','bool','kuavo_msgs/ikSolveParam','int32']

  def __init__(self, *args, **kwds):
  """
@@ -69,7 +70,7 @@ float64 pos_cost_weight"""
  changes. You cannot mix in-order arguments and keyword arguments.

  The available fields are:
- hand_poses,use_custom_ik_param,joint_angles_as_q0,ik_param
+ hand_poses,use_custom_ik_param,joint_angles_as_q0,ik_param,frame

  :param args: complete set of field values, in .msg order
  :param kwds: use keyword arguments corresponding to message field names
@@ -86,11 +87,14 @@ float64 pos_cost_weight"""
  self.joint_angles_as_q0 = False
  if self.ik_param is None:
  self.ik_param = kuavo_msgs.msg.ikSolveParam()
+ if self.frame is None:
+ self.frame = 0
  else:
  self.hand_poses = kuavo_msgs.msg.twoArmHandPose()
  self.use_custom_ik_param = False
  self.joint_angles_as_q0 = False
  self.ik_param = kuavo_msgs.msg.ikSolveParam()
+ self.frame = 0

  def _get_types(self):
  """
@@ -121,7 +125,7 @@ float64 pos_cost_weight"""
  buff.write(_get_struct_3d().pack(*self.hand_poses.right_pose.elbow_pos_xyz))
  buff.write(_get_struct_7d().pack(*self.hand_poses.right_pose.joint_angles))
  _x = self
- buff.write(_get_struct_2B7d().pack(_x.use_custom_ik_param, _x.joint_angles_as_q0, _x.ik_param.major_optimality_tol, _x.ik_param.major_feasibility_tol, _x.ik_param.minor_feasibility_tol, _x.ik_param.major_iterations_limit, _x.ik_param.oritation_constraint_tol, _x.ik_param.pos_constraint_tol, _x.ik_param.pos_cost_weight))
+ buff.write(_get_struct_2B7di().pack(_x.use_custom_ik_param, _x.joint_angles_as_q0, _x.ik_param.major_optimality_tol, _x.ik_param.major_feasibility_tol, _x.ik_param.minor_feasibility_tol, _x.ik_param.major_iterations_limit, _x.ik_param.oritation_constraint_tol, _x.ik_param.pos_constraint_tol, _x.ik_param.pos_cost_weight, _x.frame))
  except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
  except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

@@ -177,8 +181,8 @@ float64 pos_cost_weight"""
  self.hand_poses.right_pose.joint_angles = _get_struct_7d().unpack(str[start:end])
  _x = self
  start = end
- end += 58
- (_x.use_custom_ik_param, _x.joint_angles_as_q0, _x.ik_param.major_optimality_tol, _x.ik_param.major_feasibility_tol, _x.ik_param.minor_feasibility_tol, _x.ik_param.major_iterations_limit, _x.ik_param.oritation_constraint_tol, _x.ik_param.pos_constraint_tol, _x.ik_param.pos_cost_weight,) = _get_struct_2B7d().unpack(str[start:end])
+ end += 62
+ (_x.use_custom_ik_param, _x.joint_angles_as_q0, _x.ik_param.major_optimality_tol, _x.ik_param.major_feasibility_tol, _x.ik_param.minor_feasibility_tol, _x.ik_param.major_iterations_limit, _x.ik_param.oritation_constraint_tol, _x.ik_param.pos_constraint_tol, _x.ik_param.pos_cost_weight, _x.frame,) = _get_struct_2B7di().unpack(str[start:end])
  self.use_custom_ik_param = bool(self.use_custom_ik_param)
  self.joint_angles_as_q0 = bool(self.joint_angles_as_q0)
  return self
@@ -210,7 +214,7 @@ float64 pos_cost_weight"""
  buff.write(self.hand_poses.right_pose.elbow_pos_xyz.tostring())
  buff.write(self.hand_poses.right_pose.joint_angles.tostring())
  _x = self
- buff.write(_get_struct_2B7d().pack(_x.use_custom_ik_param, _x.joint_angles_as_q0, _x.ik_param.major_optimality_tol, _x.ik_param.major_feasibility_tol, _x.ik_param.minor_feasibility_tol, _x.ik_param.major_iterations_limit, _x.ik_param.oritation_constraint_tol, _x.ik_param.pos_constraint_tol, _x.ik_param.pos_cost_weight))
+ buff.write(_get_struct_2B7di().pack(_x.use_custom_ik_param, _x.joint_angles_as_q0, _x.ik_param.major_optimality_tol, _x.ik_param.major_feasibility_tol, _x.ik_param.minor_feasibility_tol, _x.ik_param.major_iterations_limit, _x.ik_param.oritation_constraint_tol, _x.ik_param.pos_constraint_tol, _x.ik_param.pos_cost_weight, _x.frame))
  except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
  except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

@@ -267,8 +271,8 @@ float64 pos_cost_weight"""
  self.hand_poses.right_pose.joint_angles = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=7)
  _x = self
  start = end
- end += 58
- (_x.use_custom_ik_param, _x.joint_angles_as_q0, _x.ik_param.major_optimality_tol, _x.ik_param.major_feasibility_tol, _x.ik_param.minor_feasibility_tol, _x.ik_param.major_iterations_limit, _x.ik_param.oritation_constraint_tol, _x.ik_param.pos_constraint_tol, _x.ik_param.pos_cost_weight,) = _get_struct_2B7d().unpack(str[start:end])
+ end += 62
+ (_x.use_custom_ik_param, _x.joint_angles_as_q0, _x.ik_param.major_optimality_tol, _x.ik_param.major_feasibility_tol, _x.ik_param.minor_feasibility_tol, _x.ik_param.major_iterations_limit, _x.ik_param.oritation_constraint_tol, _x.ik_param.pos_constraint_tol, _x.ik_param.pos_cost_weight, _x.frame,) = _get_struct_2B7di().unpack(str[start:end])
  self.use_custom_ik_param = bool(self.use_custom_ik_param)
  self.joint_angles_as_q0 = bool(self.joint_angles_as_q0)
  return self
@@ -279,12 +283,12 @@ _struct_I = genpy.struct_I
  def _get_struct_I():
  global _struct_I
  return _struct_I
- _struct_2B7d = None
- def _get_struct_2B7d():
- global _struct_2B7d
- if _struct_2B7d is None:
- _struct_2B7d = struct.Struct("<2B7d")
- return _struct_2B7d
+ _struct_2B7di = None
+ def _get_struct_2B7di():
+ global _struct_2B7di
+ if _struct_2B7di is None:
+ _struct_2B7di = struct.Struct("<2B7di")
+ return _struct_2B7di
  _struct_3I = None
  def _get_struct_3I():
  global _struct_3I
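
The functional change to twoArmHandPoseCmd is the new int32 frame field, which widens the packed trailer from struct <2B7d (58 bytes) to <2B7di (62 bytes). Below is a minimal sketch of populating the field on the publisher side; the value semantics are taken from the inline comment in the message definition, and the topic or service that carries this message is not shown in this diff.

# Hypothetical publisher-side sketch: populate the new frame field.
import kuavo_msgs.msg

cmd = kuavo_msgs.msg.twoArmHandPoseCmd()
cmd.use_custom_ik_param = False
cmd.joint_angles_as_q0 = False
# frame values, per the comment in the message definition:
#   0 = keep current frame, 1 = world frame (based on odom), 2 = local frame, 3 = manipulation world frame
cmd.frame = 1
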
@@ -14,6 +14,7 @@ from ._handForceLevel import *
  from ._jointMoveTo import *
  from ._playmusic import *
  from ._setHwIntialState import *
+ from ._setMmCtrlFrame import *
  from ._setMotorEncoderRoundService import *
  from ._setTagId import *
  from ._singleStepControl import *