wirepod-vector-sdk-audio 0.9.0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anki_vector/__init__.py +43 -0
- anki_vector/animation.py +272 -0
- anki_vector/annotate.py +590 -0
- anki_vector/audio.py +212 -0
- anki_vector/audio_stream.py +335 -0
- anki_vector/behavior.py +1135 -0
- anki_vector/camera.py +670 -0
- anki_vector/camera_viewer/__init__.py +121 -0
- anki_vector/color.py +88 -0
- anki_vector/configure/__main__.py +331 -0
- anki_vector/connection.py +838 -0
- anki_vector/events.py +420 -0
- anki_vector/exceptions.py +185 -0
- anki_vector/faces.py +819 -0
- anki_vector/lights.py +210 -0
- anki_vector/mdns.py +131 -0
- anki_vector/messaging/__init__.py +45 -0
- anki_vector/messaging/alexa_pb2.py +36 -0
- anki_vector/messaging/alexa_pb2_grpc.py +3 -0
- anki_vector/messaging/behavior_pb2.py +40 -0
- anki_vector/messaging/behavior_pb2_grpc.py +3 -0
- anki_vector/messaging/client.py +33 -0
- anki_vector/messaging/cube_pb2.py +113 -0
- anki_vector/messaging/cube_pb2_grpc.py +3 -0
- anki_vector/messaging/extensions_pb2.py +25 -0
- anki_vector/messaging/extensions_pb2_grpc.py +3 -0
- anki_vector/messaging/external_interface_pb2.py +169 -0
- anki_vector/messaging/external_interface_pb2_grpc.py +1267 -0
- anki_vector/messaging/messages_pb2.py +431 -0
- anki_vector/messaging/messages_pb2_grpc.py +3 -0
- anki_vector/messaging/nav_map_pb2.py +33 -0
- anki_vector/messaging/nav_map_pb2_grpc.py +3 -0
- anki_vector/messaging/protocol.py +33 -0
- anki_vector/messaging/response_status_pb2.py +27 -0
- anki_vector/messaging/response_status_pb2_grpc.py +3 -0
- anki_vector/messaging/settings_pb2.py +72 -0
- anki_vector/messaging/settings_pb2_grpc.py +3 -0
- anki_vector/messaging/shared_pb2.py +54 -0
- anki_vector/messaging/shared_pb2_grpc.py +3 -0
- anki_vector/motors.py +127 -0
- anki_vector/nav_map.py +409 -0
- anki_vector/objects.py +1782 -0
- anki_vector/opengl/__init__.py +103 -0
- anki_vector/opengl/assets/LICENSE.txt +21 -0
- anki_vector/opengl/assets/cube.jpg +0 -0
- anki_vector/opengl/assets/cube.mtl +9 -0
- anki_vector/opengl/assets/cube.obj +1000 -0
- anki_vector/opengl/assets/vector.mtl +67 -0
- anki_vector/opengl/assets/vector.obj +13220 -0
- anki_vector/opengl/opengl.py +864 -0
- anki_vector/opengl/opengl_vector.py +620 -0
- anki_vector/opengl/opengl_viewer.py +689 -0
- anki_vector/photos.py +145 -0
- anki_vector/proximity.py +176 -0
- anki_vector/reserve_control/__main__.py +36 -0
- anki_vector/robot.py +930 -0
- anki_vector/screen.py +201 -0
- anki_vector/status.py +322 -0
- anki_vector/touch.py +119 -0
- anki_vector/user_intent.py +186 -0
- anki_vector/util.py +1132 -0
- anki_vector/version.py +15 -0
- anki_vector/viewer.py +403 -0
- anki_vector/vision.py +202 -0
- anki_vector/world.py +899 -0
- wirepod_vector_sdk_audio-0.9.0.dist-info/METADATA +80 -0
- wirepod_vector_sdk_audio-0.9.0.dist-info/RECORD +71 -0
- wirepod_vector_sdk_audio-0.9.0.dist-info/WHEEL +5 -0
- wirepod_vector_sdk_audio-0.9.0.dist-info/licenses/LICENSE.txt +180 -0
- wirepod_vector_sdk_audio-0.9.0.dist-info/top_level.txt +1 -0
- wirepod_vector_sdk_audio-0.9.0.dist-info/zip-safe +1 -0
anki_vector/screen.py
ADDED
@@ -0,0 +1,201 @@

# Copyright (c) 2018 Anki, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the file LICENSE.txt or at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Vector's LCD Screen that displays his face.

The screen is 184 x 96 color (RGB565) pixels. The active area is 23.2mm x 12.1mm.
"""

# __all__ should order by constants, event classes, other classes, functions.
__all__ = ['dimensions', 'convert_image_to_screen_data',
           'convert_pixels_to_screen_data', 'ScreenComponent']

import sys

from . import color, connection, util
from .messaging import protocol

try:
    from PIL import Image
except ImportError:
    sys.exit("Cannot import from PIL: Do `pip3 install --user Pillow` to install")

SCREEN_WIDTH = 184
SCREEN_HEIGHT = 96


def dimensions():
    """Return the dimension (width, height) of the Screen.

    .. testcode::

        import anki_vector

        screen_dimensions = anki_vector.screen.SCREEN_WIDTH, anki_vector.screen.SCREEN_HEIGHT

    Returns:
        A tuple of ints (width, height)
    """
    return SCREEN_WIDTH, SCREEN_HEIGHT


def convert_pixels_to_screen_data(pixel_data: list, image_width: int, image_height: int):
    """Convert a sequence of pixel data to the correct format to display on Vector's face.

    :param pixel_data: sequence of triplets representing rgb values, should be ints from 0-255
    :param image_width: width of the image defined by the pixel_data
    :param image_height: height of the image defined by the pixel_data

    .. testcode::

        import anki_vector
        from anki_vector.screen import convert_pixels_to_screen_data
        from PIL import Image

        image_file = Image.open('../examples/face_images/cozmo_image.jpg')
        image_data = image_file.getdata()
        pixel_bytes = convert_pixels_to_screen_data(image_data, image_file.width, image_file.height)

    Returns:
        A :class:`bytes` object representing all of the pixels (16bit color in rgb565 format)

    Raises:
        ValueError: Invalid Dimensions
        ValueError: Bad image_width
        ValueError: Bad image_height
    """
    if len(pixel_data) != (image_width * image_height):
        raise ValueError('Invalid Dimensions: len(pixel_data) {0} != image_width={1} * image_height={2} (== {3})'.format(len(pixel_data),
                                                                                                                          image_width,
                                                                                                                          image_height,
                                                                                                                          image_width *
                                                                                                                          image_height))

    # @TODO: We should decide on a resampling approach and have this function automatically rescale images
    # We should either enforce the aspect ratio, or have options to:
    #  - automatically crop to the proper aspect ratio
    #  - stretch to fit
    #  - shrink to fit with margins some default color
    if image_width != SCREEN_WIDTH:
        raise ValueError('Bad image_width: image_width {0} must be the resolution width: {1}'.format(image_width, SCREEN_WIDTH))

    if image_height != SCREEN_HEIGHT:
        raise ValueError('Bad image_height: image_height {0} must be the resolution height: {1}'.format(image_height, SCREEN_HEIGHT))

    color_565_data = []
    for color_tuple in pixel_data:
        color_object = color.Color(rgb=color_tuple)
        color_565_data.extend(color_object.rgb565_bytepair)

    return bytes(color_565_data)


def convert_image_to_screen_data(pil_image: Image.Image):
    """Convert an image into the correct format to display on Vector's face.

    .. testcode::

        import anki_vector

        try:
            from PIL import Image
        except ImportError:
            sys.exit("Cannot import from PIL: Do `pip3 install --user Pillow` to install")

        with anki_vector.Robot() as robot:
            # Load an image
            image_file = Image.open('../examples/face_images/cozmo_image.jpg')

            # Convert the image to the format used by the Screen
            screen_data = anki_vector.screen.convert_image_to_screen_data(image_file)
            robot.screen.set_screen_with_image_data(screen_data, 4.0)

    :param pil_image: The image to display on Vector's face

    Returns:
        A :class:`bytes` object representing all of the pixels (16bit color in rgb565 format)
    """
    image_data = pil_image.getdata()

    return convert_pixels_to_screen_data(image_data, pil_image.width, pil_image.height)


class ScreenComponent(util.Component):
    """Handles messaging to control Vector's screen"""

    @connection.on_connection_thread(log_messaging=False)
    async def set_screen_with_image_data(self, image_data: bytes, duration_sec: float, interrupt_running: bool = True):
        """
        Display an image on Vector's Screen (his "face").

        .. testcode::

            import anki_vector
            import time

            try:
                from PIL import Image
            except ImportError:
                sys.exit("Cannot import from PIL: Do `pip3 install --user Pillow` to install")

            with anki_vector.Robot() as robot:
                # Load an image
                image_file = Image.open('../examples/face_images/cozmo_image.jpg')

                # Convert the image to the format used by the Screen
                screen_data = anki_vector.screen.convert_image_to_screen_data(image_file)

                duration_s = 4.0
                robot.screen.set_screen_with_image_data(screen_data, duration_s)
                time.sleep(duration_s)

        :param image_data: A :class:`bytes` object representing all of the pixels (16bit color in rgb565 format)
        :param duration_sec: The number of seconds the image should remain on Vector's face.
        :param interrupt_running: Set to true so any currently-streaming animation will be aborted in favor of this.
        """
        if not isinstance(image_data, bytes):
            raise ValueError("set_screen_with_image_data expected bytes")
        if len(image_data) != 35328:
            raise ValueError("set_screen_with_image_data expected 35328 bytes - (2 bytes each for 17664 pixels)")

        # Generate the message
        message = protocol.DisplayFaceImageRGBRequest()
        # Create byte array at the Screen resolution
        message.face_data = image_data
        message.duration_ms = int(1000 * duration_sec)
        message.interrupt_running = interrupt_running

        return await self.grpc_interface.DisplayFaceImageRGB(message)

    def set_screen_to_color(self, solid_color: color.Color, duration_sec: float, interrupt_running: bool = True):
        """
        Set Vector's Screen (his "face") to a solid color.

        .. testcode::

            import anki_vector
            import time

            with anki_vector.Robot() as robot:
                duration_s = 4.0
                robot.screen.set_screen_to_color(anki_vector.color.Color(rgb=[255, 128, 0]), duration_sec=duration_s)
                time.sleep(duration_s)

        :param solid_color: Desired color to set Vector's Screen.
        :param duration_sec: The number of seconds the color should remain on Vector's face.
        :param interrupt_running: Set to true so any currently-streaming animation will be aborted in favor of this.
        """
        image_data = bytes(solid_color.rgb565_bytepair * 17664)
        return self.set_screen_with_image_data(image_data, duration_sec, interrupt_running)
anki_vector/status.py
ADDED
@@ -0,0 +1,322 @@

# Copyright (c) 2018 Anki, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the file LICENSE.txt or at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
.. _status:

Robot Status class and exposed properties for Vector's various states.

The :class:`RobotStatus` class in this module exposes properties
about the robot status like :py:attr:`is_charging <RobotStatus.is_charging>`,
:py:attr:`is_being_held <RobotStatus.is_being_held>`, etc.
"""

# __all__ should order by constants, event classes, other classes, functions.
__all__ = ['ROBOT_STATUS_NONE', 'ROBOT_STATUS_ARE_MOTORS_MOVING', 'ROBOT_STATUS_IS_CARRYING_BLOCK',
           'ROBOT_STATUS_IS_DOCKING_TO_MARKER', 'ROBOT_STATUS_IS_PICKED_UP', 'ROBOT_STATUS_IS_BUTTON_PRESSED',
           'ROBOT_STATUS_IS_FALLING', 'ROBOT_STATUS_IS_ANIMATING', 'ROBOT_STATUS_IS_PATHING',
           'ROBOT_STATUS_LIFT_IN_POS', 'ROBOT_STATUS_HEAD_IN_POS', 'ROBOT_STATUS_CALM_POWER_MODE',
           'ROBOT_STATUS_IS_ON_CHARGER', 'ROBOT_STATUS_IS_CHARGING', 'ROBOT_STATUS_CLIFF_DETECTED',
           'ROBOT_STATUS_ARE_WHEELS_MOVING', 'ROBOT_STATUS_IS_BEING_HELD', 'ROBOT_STATUS_IS_ROBOT_MOVING',
           'RobotStatus']

from . import util
from .messaging import protocol

ROBOT_STATUS_NONE = protocol.RobotStatus.Value("ROBOT_STATUS_NONE")
ROBOT_STATUS_ARE_MOTORS_MOVING = protocol.RobotStatus.Value("ROBOT_STATUS_IS_MOVING")
ROBOT_STATUS_IS_CARRYING_BLOCK = protocol.RobotStatus.Value("ROBOT_STATUS_IS_CARRYING_BLOCK")
ROBOT_STATUS_IS_DOCKING_TO_MARKER = protocol.RobotStatus.Value("ROBOT_STATUS_IS_PICKING_OR_PLACING")
ROBOT_STATUS_IS_PICKED_UP = protocol.RobotStatus.Value("ROBOT_STATUS_IS_PICKED_UP")
ROBOT_STATUS_IS_BUTTON_PRESSED = protocol.RobotStatus.Value("ROBOT_STATUS_IS_BUTTON_PRESSED")
ROBOT_STATUS_IS_FALLING = protocol.RobotStatus.Value("ROBOT_STATUS_IS_FALLING")
ROBOT_STATUS_IS_ANIMATING = protocol.RobotStatus.Value("ROBOT_STATUS_IS_ANIMATING")
ROBOT_STATUS_IS_PATHING = protocol.RobotStatus.Value("ROBOT_STATUS_IS_PATHING")
ROBOT_STATUS_LIFT_IN_POS = protocol.RobotStatus.Value("ROBOT_STATUS_LIFT_IN_POS")
ROBOT_STATUS_HEAD_IN_POS = protocol.RobotStatus.Value("ROBOT_STATUS_HEAD_IN_POS")
ROBOT_STATUS_CALM_POWER_MODE = protocol.RobotStatus.Value("ROBOT_STATUS_CALM_POWER_MODE")
ROBOT_STATUS_IS_ON_CHARGER = protocol.RobotStatus.Value("ROBOT_STATUS_IS_ON_CHARGER")
ROBOT_STATUS_IS_CHARGING = protocol.RobotStatus.Value("ROBOT_STATUS_IS_CHARGING")
ROBOT_STATUS_CLIFF_DETECTED = protocol.RobotStatus.Value("ROBOT_STATUS_CLIFF_DETECTED")
ROBOT_STATUS_ARE_WHEELS_MOVING = protocol.RobotStatus.Value("ROBOT_STATUS_ARE_WHEELS_MOVING")
ROBOT_STATUS_IS_BEING_HELD = protocol.RobotStatus.Value("ROBOT_STATUS_IS_BEING_HELD")
ROBOT_STATUS_IS_ROBOT_MOVING = protocol.RobotStatus.Value("ROBOT_STATUS_IS_MOTION_DETECTED")


class RobotStatus():
    """A class to expose various status properties of the robot."""

    def __init__(self):
        # Default robot status
        self._status: int = None

    def set(self, status: int):
        self._status = status

    @util.block_while_none()
    def __get(self) -> int:
        return self._status

    @property
    def are_motors_moving(self) -> bool:
        """True if Vector is currently moving any of his motors (head, arm or
        wheels/treads).

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.are_motors_moving:
                    print("Vector is moving.")
        """
        return (self.__get() & ROBOT_STATUS_ARE_MOTORS_MOVING) != 0

    @property
    def is_carrying_block(self) -> bool:
        """True if Vector is currently carrying a block.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_carrying_block:
                    print("Vector is carrying his block.")
        """
        return (self.__get() & ROBOT_STATUS_IS_CARRYING_BLOCK) != 0

    @property
    def is_docking_to_marker(self) -> bool:
        """True if Vector has seen a marker and is actively heading toward it
        (for example his charger or cube).

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_docking_to_marker:
                    print("Vector has found a marker and is docking to it.")
        """
        return (self.__get() & ROBOT_STATUS_IS_DOCKING_TO_MARKER) != 0

    @property
    def is_picked_up(self) -> bool:
        """True if Vector is currently picked up (in the air).

        If :py:attr:`is_being_held` is true, then :py:attr:`is_picked_up` is always True.

        :py:attr:`is_picked_up` uses the IMU data to determine if the robot is not on a stable surface with his treads down.
        If the robot is on its side, :py:attr:`is_picked_up` is True.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_picked_up:
                    print("Vector is picked up.")
        """
        return (self.__get() & ROBOT_STATUS_IS_PICKED_UP) != 0

    @property
    def is_button_pressed(self) -> bool:
        """True if Vector's button is pressed.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_button_pressed:
                    print("Vector's button was button pressed.")
        """
        return (self.__get() & ROBOT_STATUS_IS_BUTTON_PRESSED) != 0

    @property
    def is_falling(self) -> bool:
        """True if Vector is currently falling.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_falling:
                    print("Vector is falling.")
        """
        return (self.__get() & ROBOT_STATUS_IS_FALLING) != 0

    @property
    def is_animating(self) -> bool:
        """True if Vector is currently playing an animation.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_animating:
                    print("Vector is animating.")
        """
        return (self.__get() & ROBOT_STATUS_IS_ANIMATING) != 0

    @property
    def is_pathing(self) -> bool:
        """True if Vector is currently traversing a path.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_pathing:
                    print("Vector is traversing a path.")
        """
        return (self.__get() & ROBOT_STATUS_IS_PATHING) != 0

    @property
    def is_lift_in_pos(self) -> bool:
        """True if Vector's arm is in the desired position (False if still
        trying to move it there).

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_lift_in_pos:
                    print("Vector's arm is in position.")
        """
        return (self.__get() & ROBOT_STATUS_LIFT_IN_POS) != 0

    @property
    def is_head_in_pos(self) -> bool:
        """True if Vector's head is in the desired position (False if still
        trying to move there).

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_head_in_pos:
                    print("Vector's head is in position.")
        """
        return (self.__get() & ROBOT_STATUS_HEAD_IN_POS) != 0

    @property
    def is_in_calm_power_mode(self) -> bool:
        """True if Vector is in calm power mode. Calm power mode is generally
        when Vector is sleeping or charging.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_in_calm_power_mode:
                    print("Vector is in calm power mode.")
        """
        return (self.__get() & ROBOT_STATUS_CALM_POWER_MODE) != 0

    @property
    def is_on_charger(self) -> bool:
        """True if Vector is currently on the charger.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_on_charger:
                    print("Vector is on the charger.")
        """
        return (self.__get() & ROBOT_STATUS_IS_ON_CHARGER) != 0

    @property
    def is_charging(self) -> bool:
        """True if Vector is currently charging.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_charging:
                    print("Vector is currently charging.")
        """
        return (self.__get() & ROBOT_STATUS_IS_CHARGING) != 0

    @property
    def is_cliff_detected(self) -> bool:
        """True if Vector detected a cliff using any of his four cliff sensors.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_cliff_detected:
                    print("Vector has detected a cliff.")
        """
        return (self.__get() & ROBOT_STATUS_CLIFF_DETECTED) != 0

    @property
    def are_wheels_moving(self) -> bool:
        """True if Vector's wheels/treads are currently moving.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.are_wheels_moving:
                    print("Vector's wheels are moving.")
        """
        return (self.__get() & ROBOT_STATUS_ARE_WHEELS_MOVING) != 0

    @property
    def is_being_held(self) -> bool:
        """True if Vector is being held.

        :py:attr:`is_being_held` uses the IMU to look for tiny motions
        that suggest the robot is actively being held in someone's hand.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_being_held:
                    print("Vector is being held.")
        """
        return (self.__get() & ROBOT_STATUS_IS_BEING_HELD) != 0

    @property
    def is_robot_moving(self) -> bool:
        """True if Vector is in motion. This includes any of his motors
        (head, arm, wheels/tracks) and if he is being lifted, carried,
        or falling.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                if robot.status.is_robot_moving:
                    print("Vector has is in motion.")
        """
        return (self.__get() & ROBOT_STATUS_IS_ROBOT_MOVING) != 0
anki_vector/touch.py
ADDED
@@ -0,0 +1,119 @@

# Copyright (c) 2018 Anki, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the file LICENSE.txt or at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Support for Vector's touch sensor.

The robot will forward a raw sensor reading representing the capacitance detected
on its back sensor. Accompanied with this value is a true/false flag that takes into
account other aspects of the robot's state to evaluate whether the robot thinks it is
being touched or not. This flag is the same value used internally for petting detection.
"""

# __all__ should order by constants, event classes, other classes, functions.
__all__ = ["TouchComponent", "TouchSensorData"]

from . import util
from .events import Events
from .messaging import protocol


class TouchSensorData:
    """A touch sample from the capacitive touch sensor, accompanied with the robot's
    conclusion on whether this is considered a valid touch.
    """

    def __init__(self, proto_data: protocol.TouchData):
        self._raw_touch_value = proto_data.raw_touch_value
        self._is_being_touched = proto_data.is_being_touched

    @property
    def raw_touch_value(self) -> int:
        """The detected sensitivity from the touch sensor.

        This will not map to a constant raw value, as it may be impacted by various
        environmental factors such as whether the robot is on its charger, being held, humidity, etc.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                touch_data = robot.touch.last_sensor_reading
                if touch_data is not None:
                    raw_touch_value = touch_data.raw_touch_value
        """
        return self._raw_touch_value

    @property
    def is_being_touched(self) -> bool:
        """The robot's conclusion on whether the current value is considered
        a valid touch.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                touch_data = robot.touch.last_sensor_reading
                if touch_data is not None:
                    is_being_touched = touch_data.is_being_touched
        """
        return self._is_being_touched


class TouchComponent(util.Component):
    """Maintains the most recent touch sensor data

    This will be updated with every broadcast RobotState, and can be queried at any time.

    .. testcode::

        import anki_vector

        with anki_vector.Robot() as robot:
            touch_data = robot.touch.last_sensor_reading
            if touch_data is not None:
                print('Touch sensor value: {0}, is being touched: {1}'.format(touch_data.raw_touch_value, touch_data.is_being_touched))
    """

    def __init__(self, robot):
        super().__init__(robot)
        self._last_sensor_reading = None

        # Subscribe to a callback that updates the robot's local properties - which includes touch data.
        self._robot.events.subscribe(self._on_robot_state,
                                     Events.robot_state,
                                     _on_connection_thread=True)

    def close(self):
        """Closing the touch component will unsubscribe from robot state updates."""
        self._robot.events.unsubscribe(self._on_robot_state,
                                       Events.robot_state)

    @property
    def last_sensor_reading(self) -> TouchSensorData:
        """:class:`anki_vector.touch.TouchSensorData`: The last reported sensor data.

        .. testcode::

            import anki_vector

            with anki_vector.Robot() as robot:
                touch_data = robot.touch.last_sensor_reading
        """
        return self._last_sensor_reading

    def _on_robot_state(self, _robot, _event_type, msg):
        self._last_sensor_reading = TouchSensorData(msg.touch_data)