bosdyn-client 5.0.1.1__py3-none-any.whl → 5.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. bosdyn/client/access_controlled_door_util.py +206 -0
  2. bosdyn/client/arm_surface_contact.py +2 -2
  3. bosdyn/client/async_tasks.py +3 -2
  4. bosdyn/client/audio_visual.py +6 -7
  5. bosdyn/client/audio_visual_helpers.py +3 -2
  6. bosdyn/client/autowalk.py +0 -2
  7. bosdyn/client/command_line.py +61 -4
  8. bosdyn/client/common.py +1 -1
  9. bosdyn/client/data_acquisition.py +3 -5
  10. bosdyn/client/data_acquisition_helpers.py +0 -3
  11. bosdyn/client/data_acquisition_plugin.py +1 -2
  12. bosdyn/client/data_acquisition_plugin_service.py +3 -2
  13. bosdyn/client/data_acquisition_store.py +1 -7
  14. bosdyn/client/data_buffer.py +5 -4
  15. bosdyn/client/directory_registration.py +3 -2
  16. bosdyn/client/estop.py +3 -2
  17. bosdyn/client/fault.py +1 -1
  18. bosdyn/client/gps/aggregator_client.py +2 -4
  19. bosdyn/client/gps/gps_listener.py +5 -7
  20. bosdyn/client/gps/ntrip_client.py +12 -3
  21. bosdyn/client/graph_nav.py +67 -13
  22. bosdyn/client/hazard_avoidance.py +119 -0
  23. bosdyn/client/image.py +5 -4
  24. bosdyn/client/image_service_helpers.py +6 -7
  25. bosdyn/client/ir_enable_disable.py +1 -1
  26. bosdyn/client/keepalive.py +4 -2
  27. bosdyn/client/lease.py +3 -2
  28. bosdyn/client/lease_validator.py +0 -1
  29. bosdyn/client/log_status.py +57 -3
  30. bosdyn/client/map_processing.py +2 -4
  31. bosdyn/client/network_compute_bridge_client.py +4 -6
  32. bosdyn/client/payload.py +2 -3
  33. bosdyn/client/payload_registration.py +11 -10
  34. bosdyn/client/power.py +84 -27
  35. bosdyn/client/processors.py +27 -2
  36. bosdyn/client/recording.py +3 -3
  37. bosdyn/client/robot_command.py +22 -22
  38. bosdyn/client/robot_state.py +1 -1
  39. bosdyn/client/sdk.py +2 -3
  40. bosdyn/client/service_customization_helpers.py +1 -1
  41. bosdyn/client/spot_cam/audio.py +1 -2
  42. bosdyn/client/spot_cam/health.py +1 -1
  43. bosdyn/client/spot_cam/lighting.py +1 -1
  44. bosdyn/client/spot_cam/media_log.py +1 -1
  45. bosdyn/client/spot_cam/network.py +3 -2
  46. bosdyn/client/spot_cam/power.py +1 -1
  47. bosdyn/client/spot_cam/ptz.py +1 -1
  48. bosdyn/client/spot_cam/streamquality.py +1 -1
  49. bosdyn/client/spot_cam/version.py +1 -1
  50. bosdyn/client/spot_check.py +5 -6
  51. bosdyn/client/url_validation_util.py +220 -0
  52. bosdyn/client/util.py +2 -4
  53. bosdyn/client/world_object.py +1 -1
  54. {bosdyn_client-5.0.1.1.dist-info → bosdyn_client-5.1.0.dist-info}/METADATA +3 -3
  55. bosdyn_client-5.1.0.dist-info/RECORD +106 -0
  56. bosdyn_client-5.0.1.1.dist-info/RECORD +0 -103
  57. {bosdyn_client-5.0.1.1.dist-info → bosdyn_client-5.1.0.dist-info}/WHEEL +0 -0
  58. {bosdyn_client-5.0.1.1.dist-info → bosdyn_client-5.1.0.dist-info}/top_level.txt +0 -0
bosdyn/client/gps/gps_listener.py CHANGED
@@ -11,15 +11,13 @@ import socket
  import time
  from typing import List

- import bosdyn.api
- import bosdyn.client.util
  from bosdyn.api.gps.gps_pb2 import GpsDataPoint, GpsDevice
  from bosdyn.client.exceptions import ProxyConnectionError
  from bosdyn.client.gps.aggregator_client import AggregatorClient
  from bosdyn.client.gps.NMEAParser import NMEAParser
  from bosdyn.client.gps.ntrip_client import NtripClient, NtripClientParams
  from bosdyn.client.robot import UnregisteredServiceNameError
- from bosdyn.util import RobotTimeConverter, duration_to_seconds
+ from bosdyn.util import RobotTimeConverter, now_sec


  class NMEAStreamReader(object):
@@ -44,7 +42,7 @@ class NMEAStreamReader(object):
  raw_data = str(raw_data, "utf-8")
  except UnicodeDecodeError:
  # Throttle the logs.
- now = time.time()
+ now = now_sec()
  if self.last_failed_read_log_time is None or (
  now - self.last_failed_read_log_time) > self.LOG_THROTTLE_TIME:
  self.logger.exception("Failed to decode NMEA message. Is it not Unicode?")
@@ -160,17 +158,17 @@ class GpsListener:
  agg_future = self.aggregator_client.new_gps_data_async(
  accumulated_data, self.gps_device)
  accumulated_data.clear()
- timestamp_of_last_rpc = time.time()
+ timestamp_of_last_rpc = now_sec()
  time_passed_since_last_rpc = 0
  else:
  if time_passed_since_last_rpc > every_x_seconds:
  if agg_future is None or agg_future.done():
  agg_future = self.aggregator_client.new_gps_data_async([],
  self.gps_device)
- timestamp_of_last_rpc = time.time()
+ timestamp_of_last_rpc = now_sec()
  time_passed_since_last_rpc = 0
  else:
- time_passed_since_last_rpc = time.time() - timestamp_of_last_rpc
+ time_passed_since_last_rpc = now_sec() - timestamp_of_last_rpc

  # If we are running an NTRIP client, pass it the latest GGA message.
  if self.ntrip_client is not None:
bosdyn/client/gps/ntrip_client.py CHANGED
@@ -25,7 +25,7 @@ class NtripClientParams:
  """

  def __init__(self, server=DEFAULT_NTRIP_SERVER, port=DEFAULT_NTRIP_PORT, user="", password="",
- mountpoint="", tls=False):
+ mountpoint="", tls=False, reconnect_secs=SERVER_RECONNECT_DELAY):
  """
  Constructor.
  """
@@ -35,6 +35,7 @@ class NtripClientParams:
  self.password = password
  self.mountpoint = mountpoint
  self.tls = tls
+ self.reconnect_secs = reconnect_secs


  class NtripClient:
@@ -54,6 +55,7 @@ class NtripClient:
  self.password = params.password
  self.mountpoint = params.mountpoint
  self.tls = params.tls
+ self.reconnect_secs = params.reconnect_secs

  self.thread = None
  self.streaming = False
@@ -143,9 +145,9 @@ class NtripClient:
  self.logger.info(response_lines[0].decode())
  if status[1] != "200":
  self.logger.error("HTTP Error: %s, retrying in %d seconds",
- response_lines[0].decode(), SERVER_RECONNECT_DELAY)
+ response_lines[0].decode(), self.reconnect_secs)
  sock.close()
- time.sleep(SERVER_RECONNECT_DELAY)
+ time.sleep(self.reconnect_secs)
  return False
  self.logger.info("NTRIP Request Response received.")
  for line in range(1, len(response_lines)):
@@ -218,6 +220,13 @@ class NtripClient:
  self.streaming = True
  while self.streaming:
  self.stream_data()
+ # stream_data blocks for the lifetime of a connection. If it returns while we are still
+ # expecting to be streaming, it means the server disconnected from us. Wait for a delay
+ # before attempting to reconnect to avoid spamming the server.
+ if self.streaming:
+ self.logger.info(
+ f"Attempting to reconnect to NTRIP server in {self.reconnect_secs} seconds.")
+ time.sleep(self.reconnect_secs)

  def handle_ntrip_data(self, data):
  """
bosdyn/client/graph_nav.py CHANGED
@@ -13,14 +13,14 @@ import time
  from deprecated.sphinx import deprecated

  from bosdyn.api import data_chunk_pb2, lease_pb2
- from bosdyn.api.graph_nav import (graph_nav_pb2, graph_nav_service_pb2, graph_nav_service_pb2_grpc,
- map_pb2, nav_pb2)
+ from bosdyn.api.graph_nav import graph_nav_pb2, graph_nav_service_pb2_grpc, map_pb2, nav_pb2
  from bosdyn.client.common import (BaseClient, common_header_errors, common_lease_errors,
  error_factory, error_pair, handle_common_header_errors,
  handle_lease_use_result_errors, handle_license_errors_if_present,
  handle_unset_status_error)
- from bosdyn.client.exceptions import Error, InvalidRequestError, ResponseError, UnimplementedError
+ from bosdyn.client.exceptions import ResponseError, UnimplementedError
  from bosdyn.client.lease import add_lease_wallet_processors
+ from bosdyn.util import now_sec


  class GraphNavClient(BaseClient):
@@ -476,13 +476,15 @@ class GraphNavClient(BaseClient):
  error_from_response=handle_common_header_errors(common_lease_errors),
  copy_request=False, **kwargs)

- def upload_graph(self, lease=None, graph=None, generate_new_anchoring=False, **kwargs):
+ def upload_graph(self, lease=None, graph=None, generate_new_anchoring=False,
+ replace_graph=False, **kwargs):
  """Uploads a graph to the server and appends to the existing graph.

  Args:
  lease: Leases to show ownership of necessary resources. Will use the client's leases by default.
  graph: Graph protobuf that represents the map with waypoints and edges.
  generate_new_anchoring: Whether to generate an (overwrite the) anchoring on upload.
+ replace_graph: If true, replaces the existing graph with the new one rather than adding to it.
  Returns:
  The response, which includes waypoint and edge id's sorted by whether it was cached.
  Raises:
@@ -493,7 +495,8 @@ class GraphNavClient(BaseClient):
  LeaseUseError: Error using provided lease.
  LicenseError: The robot's license is not valid.
  """
- request = self._build_upload_graph_request(lease, graph, generate_new_anchoring)
+ request = self._build_upload_graph_request(lease, graph, generate_new_anchoring,
+ replace_graph)
  # Use streaming to upload the graph, if applicable.
  if self._use_streaming_graph_upload:
  # Need to manually apply request processors since this will be serialized and chunked.
@@ -509,14 +512,17 @@ class GraphNavClient(BaseClient):
  except UnimplementedError:
  print('UploadGraphStreaming unimplemented. Old robot release?')
  # Recreate the request so that we clear any state that might have happened during our attempt to stream.
- request = self._build_upload_graph_request(lease, graph, generate_new_anchoring)
+ request = self._build_upload_graph_request(lease, graph, generate_new_anchoring,
+ replace_graph)
  # Continue to regular UploadGraph.
  return self.call(self._stub.UploadGraph, request, value_from_response=_get_response,
  error_from_response=_upload_graph_error, copy_request=False, **kwargs)

- def upload_graph_async(self, lease=None, graph=None, generate_new_anchoring=False, **kwargs):
+ def upload_graph_async(self, lease=None, graph=None, generate_new_anchoring=False,
+ replace_graph=False, **kwargs):
  """Async version of upload_graph()."""
- request = self._build_upload_graph_request(lease, graph, generate_new_anchoring)
+ request = self._build_upload_graph_request(lease, graph, generate_new_anchoring,
+ replace_graph)
  return self.call_async(self._stub.UploadGraph, request, value_from_response=_get_response,
  error_from_response=_upload_graph_error, copy_request=False,
  **kwargs)
@@ -562,6 +568,32 @@ class GraphNavClient(BaseClient):
  value_from_response=None,
  error_from_response=handle_common_header_errors(common_lease_errors), **kwargs)

+ def upload_snapshots(self, snapshots, lease=None, **kwargs):
+ """Uploads multiple snapshots as a stream.
+
+ graph_nav only processes complete Snapshots so large protos are discouraged;
+ any network interruption would require the data to be resent. Clients are
+ encouraged to send data in batches on the order of a few MB to strike a
+ balance between eliminating per-RPC overhead and recovering from errors.
+
+ Args:
+ lease: Leases to show ownership of necessary resources. Will use the client's leases by default.
+ snapshots: UploadSnapshotsRequest.Snapshots protobuf that will be stream-uploaded to the robot.
+ Returns:
+ The status of the upload request.
+ Raises:
+ RpcError: Problem communicating with the robot.
+ LeaseUseError: Error using provided leases.
+ """
+ lease = lease or lease_pb2.Lease()
+ serialized = snapshots.SerializeToString()
+ self.call(
+ self._stub.UploadSnapshots,
+ GraphNavClient._data_chunk_iterator_upload_snapshots(serialized, lease,
+ self._data_chunk_size),
+ value_from_response=None,
+ error_from_response=handle_common_header_errors(common_lease_errors), **kwargs)
+
  def download_graph(self, **kwargs):
  """Downloads the graph from the server.

@@ -724,7 +756,7 @@ class GraphNavClient(BaseClient):
  if travel_params is not None:
  request.travel_params.CopyFrom(travel_params)
  request.end_time.CopyFrom(
- converter.robot_timestamp_from_local_secs(time.time() + end_time_secs))
+ converter.robot_timestamp_from_local_secs(now_sec() + end_time_secs))
  if command_id is not None:
  request.command_id = command_id
  return request
@@ -739,7 +771,7 @@ class GraphNavClient(BaseClient):
  destination_waypoint_tform_body_goal=destination_waypoint_tform_body_goal,
  clock_identifier=timesync_endpoint.clock_identifier)
  request.end_time.CopyFrom(
- converter.robot_timestamp_from_local_secs(time.time() + end_time_secs))
+ converter.robot_timestamp_from_local_secs(now_sec() + end_time_secs))
  if travel_params is not None:
  request.travel_params.CopyFrom(travel_params)
  if route_params is not None:
@@ -764,7 +796,7 @@ class GraphNavClient(BaseClient):
  if gps_navigation_params:
  request.gps_navigation_params.CopyFrom(gps_navigation_params)
  request.end_time.CopyFrom(
- converter.robot_timestamp_from_local_secs(time.time() + end_time_secs))
+ converter.robot_timestamp_from_local_secs(now_sec() + end_time_secs))
  if travel_params is not None:
  request.travel_params.CopyFrom(travel_params)
  if route_params is not None:
@@ -783,10 +815,11 @@ class GraphNavClient(BaseClient):
  return graph_nav_pb2.NavigationFeedbackRequest(command_id=command_id)

  @staticmethod
- def _build_upload_graph_request(lease, graph, generate_new_anchoring):
+ def _build_upload_graph_request(lease, graph, generate_new_anchoring, replace_graph):
  lease = lease or lease_pb2.Lease()
  return graph_nav_pb2.UploadGraphRequest(lease=lease, graph=graph,
- generate_new_anchoring=generate_new_anchoring)
+ generate_new_anchoring=generate_new_anchoring,
+ replace_graph=replace_graph)

  @staticmethod
  def _data_chunk_iterator_upload_graph(serialized_upload_graph, data_chunk_byte_size):
@@ -836,6 +869,27 @@ class GraphNavClient(BaseClient):
  req = graph_nav_pb2.UploadEdgeSnapshotRequest(lease=lease, chunk=chunk)
  yield req

+ @staticmethod
+ def _data_chunk_iterator_upload_snapshots(serialized_snapshots, lease, data_chunk_byte_size):
+ total_bytes_size = len(serialized_snapshots)
+ # If the snapshots are empty, still send one empty request.
+ # This is used to probe if the RPC is implemented.
+ if 0 == total_bytes_size:
+ req = graph_nav_pb2.UploadSnapshotsRequest(lease=lease)
+ yield req
+
+ num_chunks = math.ceil(total_bytes_size / data_chunk_byte_size)
+ for i in range(num_chunks):
+ start_index = i * data_chunk_byte_size
+ end_index = (i + 1) * data_chunk_byte_size
+ chunk = data_chunk_pb2.DataChunk(total_size=total_bytes_size)
+ if (end_index > total_bytes_size):
+ chunk.data = serialized_snapshots[start_index:total_bytes_size]
+ else:
+ chunk.data = serialized_snapshots[start_index:end_index]
+ req = graph_nav_pb2.UploadSnapshotsRequest(lease=lease, chunk=chunk)
+ yield req
+
  @staticmethod
  def _build_download_graph_request():
  return graph_nav_pb2.DownloadGraphRequest()
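
A hedged sketch of the new replace_graph flag from the caller's side: instead of appending waypoints and edges to whatever map is already on the robot, the upload replaces it outright. The robot object and the map_pb2.Graph are assumed to come from elsewhere; only upload_graph() and its new keyword are taken from this diff.

from bosdyn.client.graph_nav import GraphNavClient

graph_nav = robot.ensure_client(GraphNavClient.default_service_name)
# Discard the map currently on the robot and install `graph` in its place.
graph_nav.upload_graph(graph=graph, generate_new_anchoring=False, replace_graph=True)

The companion upload_snapshots() call streams an UploadSnapshotsRequest.Snapshots proto to the robot in DataChunk pieces, reusing the same lease handling as the other upload RPCs; per its docstring, batches of a few MB are the intended granularity.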
bosdyn/client/hazard_avoidance.py ADDED
@@ -0,0 +1,119 @@
+ # Copyright (c) 2023 Boston Dynamics, Inc. All rights reserved.
+ #
+ # Downloading, reproducing, distributing or otherwise using the SDK Software
+ # is subject to the terms and conditions of the Boston Dynamics Software
+ # Development Kit License (20191101-BDSDK-SL).
+
+ """For clients to use the hazard_avoidance service"""
+
+ import collections
+
+ from bosdyn.api import hazard_avoidance_pb2
+ from bosdyn.api import hazard_avoidance_service_pb2_grpc as hazard_avoidance_service
+ from bosdyn.client.common import (BaseClient, custom_params_error, error_factory, error_pair,
+ handle_common_header_errors)
+ from bosdyn.client.exceptions import InvalidRequestError, ResponseError, UnsetStatusError
+ from bosdyn.client.robot_command import NoTimeSyncError
+ from bosdyn.client.time_sync import update_timestamp_filter
+
+
+ class AddHazardsResponseError(ResponseError):
+ """General class of errors for hazard avoidance service."""
+
+
+ _ADD_HAZARD_STATUS_TO_ERROR = collections.defaultdict(lambda: (AddHazardsResponseError, None))
+ _ADD_HAZARD_STATUS_TO_ERROR.update({
+ hazard_avoidance_pb2.AddHazardResult.STATUS_HAZARDS_UPDATED: (None, None),
+ hazard_avoidance_pb2.AddHazardResult.STATUS_IGNORED: (None, None),
+ hazard_avoidance_pb2.AddHazardResult.STATUS_INVALID_DATA: error_pair(InvalidRequestError),
+ hazard_avoidance_pb2.AddHazardResult.STATUS_UNKNOWN: error_pair(UnsetStatusError),
+ })
+
+
+ @handle_common_header_errors
+ def _error_from_response(response):
+ """Return a custom exception based on the first invalid add hazard result, None if no error."""
+ for add_hazard_result in response.add_hazard_results:
+ result = custom_params_error(add_hazard_result, total_response=response)
+ if result is not None:
+ return result
+
+ result = error_factory(response, add_hazard_result.status,
+ status_to_string=hazard_avoidance_pb2.AddHazardResult.Status.Name,
+ status_to_error=_ADD_HAZARD_STATUS_TO_ERROR)
+ if result is not None:
+ # The exception is using the add hazards result. Replace it with the full response.
+ result.response = response
+ return result
+ return None
+
+
+ def _get_add_hazards_value(response):
+ return response.add_hazard_results
+
+
+ class HazardAvoidanceClient(BaseClient):
+ """Client for Hazard avoidance service."""
+ default_service_name = 'hazard-avoidance-service'
+ service_type = 'bosdyn.api.HazardAvoidanceService'
+
+ def __init__(self):
+ super(HazardAvoidanceClient,
+ self).__init__(hazard_avoidance_service.HazardAvoidanceServiceStub)
+ self._timesync_endpoint = None
+
+ def update_from(self, other):
+ super(HazardAvoidanceClient, self).update_from(other)
+ # Grab a timesync endpoint if it is available.
+ try:
+ self._timesync_endpoint = other.time_sync.endpoint
+ except AttributeError:
+ pass # other doesn't have a time_sync accessor
+
+ @property
+ def timesync_endpoint(self):
+ """Accessor for timesync-endpoint that is grabbed via 'update_from()'.
+
+ Raises:
+ bosdyn.client.robot_command.NoTimeSyncError: Could not find the timesync endpoint for
+ the robot.
+ """
+ if not self._timesync_endpoint:
+ raise NoTimeSyncError("[world object service] No timesync endpoint set for the robot")
+ return self._timesync_endpoint
+
+ def add_hazards(self, add_hazards_req, **kwargs):
+ """Add hazards to the hazard map.
+
+ Args:
+ add_hazards_req (hazard_avoidance_pb2.AddHazardsRequest): The request including the hazard observations to add.
+ Returns:
+ response (hazard_avoidance_pb2.AddHazardsResponse): Contains the status of adding each observation,
+
+ Raises:
+ RpcError: Problem communicating with the robot.
+ bosdyn.client.robot_command.NoTimeSyncError: Couldn't convert the timestamp into robot
+ time.
+ UnsetStatusError: An internal HazardAvoidanceService issue has happened.
+ AddHazardsResponseError: General problem with the request.
+ bosdyn.client.exceptions.InvalidRequestError: One or more hazard observations contained errors.
+ """
+ for hazard_obs in add_hazards_req.hazards:
+ if hazard_obs.HasField("acquisition_time"):
+ # Ensure the hazard observation's time of detection is in robot time.
+ client_timestamp = hazard_obs.acquisition_time
+ hazard_obs.acquisition_time.CopyFrom(
+ update_timestamp_filter(self, client_timestamp, self.timesync_endpoint))
+ return self.call(self._stub.AddHazards, add_hazards_req, _get_add_hazards_value,
+ _error_from_response, copy_request=False, **kwargs)
+
+ def add_hazards_async(self, add_hazards_req, **kwargs):
+ """Async version of add_hazards()."""
+ for hazard_obs in add_hazards_req.hazards:
+ if hazard_obs.HasField("acquisition_time"):
+ # Ensure the hazard observation's time of detection is in robot time.
+ client_timestamp = hazard_obs.acquisition_time
+ hazard_obs.acquisition_time.CopyFrom(
+ update_timestamp_filter(self, client_timestamp, self.timesync_endpoint))
+ return self.call_async(self._stub.AddHazards, add_hazards_req, _get_add_hazards_value,
+ _error_from_response, copy_request=False, **kwargs)
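
A hedged usage sketch for the new client. It assumes only what this file shows: the default service name, the AddHazardsRequest.hazards field, and the per-observation acquisition_time that add_hazards() rewrites into robot time via the timesync endpoint; every other observation field, and the registration of the client with the SDK, is left to the rest of the package.

from bosdyn.api import hazard_avoidance_pb2
from bosdyn.client.hazard_avoidance import HazardAvoidanceClient

robot.time_sync.wait_for_sync()  # add_hazards() needs a timesync endpoint
hazard_client = robot.ensure_client(HazardAvoidanceClient.default_service_name)

request = hazard_avoidance_pb2.AddHazardsRequest()
observation = request.hazards.add()
observation.acquisition_time.GetCurrentTime()  # client-side time; converted to robot time on send
# ... populate the remaining observation fields per hazard_avoidance_pb2 ...

results = hazard_client.add_hazards(request)  # the per-observation add_hazard_results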
bosdyn/client/image.py CHANGED
@@ -231,12 +231,13 @@ def write_pgm_or_ppm(image_response, filename="", filepath=".", include_pixel_fo
  a filename ("image-{SOURCENAME}-{PIXELFORMAT}.pgm").
  """
  # Determine the data type to decode the image.
+ remote_dtype = pixel_format_to_numpy_type(image_response.shot.image.pixel_format)
  if image_response.shot.image.pixel_format in (image_pb2.Image.PIXEL_FORMAT_DEPTH_U16,
  image_pb2.Image.PIXEL_FORMAT_GREYSCALE_U16):
- dtype = np.uint16
+ local_dtype = np.uint16
  max_val = np.iinfo(np.uint16).max
  else:
- dtype = np.uint8
+ local_dtype = np.uint8
  max_val = np.iinfo(np.uint8).max

  num_channels = 1
@@ -259,11 +260,11 @@ def write_pgm_or_ppm(image_response, filename="", filepath=".", include_pixel_fo
  image_pb2.Image.PixelFormat.Name(image_response.shot.image.pixel_format))
  return

- img = np.frombuffer(image_response.shot.image.data, dtype=dtype)
+ img = np.frombuffer(image_response.shot.image.data, dtype=remote_dtype)
  height = image_response.shot.image.rows
  width = image_response.shot.image.cols
  try:
- img = img.reshape((height, width, num_channels))
+ img = img.astype(local_dtype).reshape((height, width, num_channels))
  except ValueError as err:
  print(
  "Cannot convert raw image into expected shape (rows %d, cols %d, color channels %d)." %
bosdyn/client/image_service_helpers.py CHANGED
@@ -14,17 +14,16 @@ from abc import ABC, abstractmethod

  import numpy as np

- from bosdyn.api import (header_pb2, image_pb2, image_service_pb2, image_service_pb2_grpc,
- service_customization_pb2, service_fault_pb2)
+ from bosdyn.api import (image_pb2, image_service_pb2_grpc, service_customization_pb2,
+ service_fault_pb2)
  from bosdyn.client.data_buffer import DataBufferClient
  from bosdyn.client.exceptions import RpcError
  from bosdyn.client.fault import (FaultClient, ServiceFaultAlreadyExistsError,
  ServiceFaultDoesNotExistError)
- from bosdyn.client.image import UnsupportedPixelFormatRequestedError
  from bosdyn.client.server_util import ResponseContext, populate_response_header
- from bosdyn.client.service_customization_helpers import create_value_validator, validate_dict_spec
+ from bosdyn.client.service_customization_helpers import create_value_validator
  from bosdyn.client.util import setup_logging
- from bosdyn.util import sec_to_nsec, seconds_to_duration
+ from bosdyn.util import now_sec, seconds_to_duration

  _LOGGER = logging.getLogger(__name__)

@@ -577,13 +576,13 @@ class ImageCaptureThread():
  """Main loop for the image capture thread, which requests and saves images."""
  while not self.stop_capturing_event.is_set():
  # Get the image by calling the blocking capture function.
- start_time = time.time()
+ start_time = now_sec()
  capture, capture_time = self.capture_function()
  self.set_last_captured_image(capture, capture_time)

  # Wait for the total capture period (where the wait time is adjusted based on how
  # long the capture took).
- wait_time = self.capture_period_secs - (time.time() - start_time)
+ wait_time = self.capture_period_secs - (now_sec() - start_time)
  if self.stop_capturing_event.wait(wait_time):
  # If stop_capturing_event is set, then break from the capture loop now.
  break
bosdyn/client/ir_enable_disable.py CHANGED
@@ -43,4 +43,4 @@ class IREnableDisableServiceClient(BaseClient):
  request = ir_enable_disable_pb2.IREnableDisableRequest.REQUEST_OFF
  return self.call_async(self._stub.IREnableDisable,
  ir_enable_disable_pb2.IREnableDisableRequest(request=request),
- error_from_response=common_header_errors, **kwargs)
+ error_from_response=common_header_errors, **kwargs)
bosdyn/client/keepalive.py CHANGED
@@ -16,11 +16,13 @@ from typing import Callable, List, Union

  import bosdyn.client.lease
  import bosdyn.util
+ from bosdyn.api import lease_pb2
  from bosdyn.api.keepalive import keepalive_pb2, keepalive_service_pb2_grpc
  from bosdyn.client.common import (BaseClient, common_header_errors, error_factory, error_pair,
  handle_common_header_errors, handle_unset_status_error)
  from bosdyn.client.error_callback_result import ErrorCallbackResult
  from bosdyn.client.exceptions import ResponseError, RetryableRpcError
+ from bosdyn.util import now_sec


  class KeepaliveResponseError(ResponseError):
@@ -290,7 +292,7 @@ class PolicyKeepalive():
  # leave the loop. Under normal conditions, wait up to self._check_in_period seconds, minus
  # the RPC processing time. (values < 0 are OK and unblock immediately)
  while not self._end_check_in_signal.wait(wait_time):
- exec_start = time.time()
+ exec_start = now_sec()
  action = ErrorCallbackResult.RESUME_NORMAL_OPERATION

  try:
@@ -309,7 +311,7 @@ class PolicyKeepalive():
  else:
  raise
  # How long did the RPC and processing of said RPC take?
- exec_seconds = time.time() - exec_start
+ exec_seconds = now_sec() - exec_start

  if action == ErrorCallbackResult.ABORT:
  self.logger.warning('Callback directed the keepalive thread to exit.')
bosdyn/client/lease.py CHANGED
@@ -19,6 +19,7 @@ from bosdyn.api.lease_pb2 import (LeaseUseResult, ListLeasesRequest, RetainLease
  ReturnLeaseRequest, ReturnLeaseResponse, TakeLeaseRequest,
  TakeLeaseResponse)
  from bosdyn.api.lease_service_pb2_grpc import LeaseServiceStub
+ from bosdyn.util import now_sec

  from . import common
  from .exceptions import Error as BaseError
@@ -987,7 +988,7 @@ class LeaseKeepAlive(object):
  while True:
  # Include the time it takes to execute keep_running, in case it takes a significant
  # portion of our check in period.
- exec_start = time.time()
+ exec_start = now_sec()

  # Stop doing retention if this is not meant to keep running.
  if not self._keep_running():
@@ -1008,7 +1009,7 @@ class LeaseKeepAlive(object):
  self._ok()

  # How long did the RPC and processing of said RPC take?
- exec_seconds = time.time() - exec_start
+ exec_seconds = now_sec() - exec_start

  # Block and wait for the stop signal. If we receive it within the check-in period,
  # leave the loop. This check must be at the end of the loop!
bosdyn/client/lease_validator.py CHANGED
@@ -11,7 +11,6 @@ import logging
  import threading

  from bosdyn.api import lease_pb2
- from bosdyn.client.exceptions import Error
  from bosdyn.client.lease import Lease, LeaseClient
  from bosdyn.client.lease_resource_hierarchy import ResourceHierarchy

bosdyn/client/log_status.py CHANGED
@@ -12,11 +12,13 @@ This allows client code to start, extend or terminate experiment logs and start
  """

  import collections
+ import re
+ import time

  import bosdyn.util
  from bosdyn.api.log_status import log_status_pb2 as log_status
  from bosdyn.api.log_status import log_status_service_pb2_grpc as log_status_service
- from bosdyn.client.common import (BaseClient, common_header_errors, error_factory, error_pair,
+ from bosdyn.client.common import (BaseClient, error_factory, error_pair,
  handle_common_header_errors, handle_unset_status_error)
  from bosdyn.client.exceptions import ResponseError

@@ -41,6 +43,10 @@ class ConcurrencyLimitReachedError(LogStatusResponseError):
  """The limit of concurrent retro logs has be reached, a new log cannot be started."""


+ class NoDataForEventError(LogStatusResponseError):
+ """No data is available for the provided event, so a log cannot be started."""
+
+
  class LogStatusClient(BaseClient):
  """A client for interacting with robot logs."""
  # Typical name of the service in the robot's directory listing.
@@ -88,7 +94,7 @@ class LogStatusClient(BaseClient):
  error_from_response=get_active_log_statuses_error,
  copy_request=False, **kwargs)

- def start_experiment_log(self, seconds, **kwargs):
+ def start_experiment_log(self, seconds, past_textlog_duration=0, **kwargs):
  """Start an experiment log, to run for a specified duration.

  Args:
@@ -99,14 +105,16 @@ class LogStatusClient(BaseClient):
  """
  req = log_status.StartExperimentLogRequest()
  req.keep_alive.CopyFrom(bosdyn.util.seconds_to_duration(seconds))
+ req.past_textlog_duration.CopyFrom(bosdyn.util.seconds_to_duration(past_textlog_duration))
  return self.call(self._stub.StartExperimentLog, req,
  error_from_response=start_experiment_log_error, copy_request=False,
  **kwargs)

- def start_experiment_log_async(self, seconds, **kwargs):
+ def start_experiment_log_async(self, seconds, past_textlog_duration=0, **kwargs):
  """Start an experiment log, to run for a specified duration."""
  req = log_status.StartExperimentLogRequest()
  req.keep_alive.CopyFrom(bosdyn.util.seconds_to_duration(seconds))
+ req.past_textlog_duration.CopyFrom(bosdyn.util.seconds_to_duration(past_textlog_duration))
  return self.call_async(self._stub.StartExperimentLog, req,
  error_from_response=start_experiment_log_error, copy_request=False,
  **kwargs)
@@ -135,6 +143,31 @@ class LogStatusClient(BaseClient):
  error_from_response=start_retro_log_error, copy_request=False,
  **kwargs)

+ def start_concurrent_log(self, duration_seconds, event=None, **kwargs):
+ """Start an experiment log that allows concurrency, to run based on a particular data_set, as derived from the recipe corresponding to the provided event. An event must be provided!"""
+ req = log_status.StartConcurrentLogRequest()
+ req.keep_alive.CopyFrom(bosdyn.util.seconds_to_duration(duration_seconds))
+
+ if event:
+ req.event.CopyFrom(event)
+
+ return self.call(self._stub.StartConcurrentLog, req,
+ error_from_response=start_concurrent_log_error, copy_request=False,
+ **kwargs)
+
+ def start_concurrent_log_async(self, duration_seconds, data_set_names=None, properties=None,
+ event=None, **kwargs):
+ """Start an experiment log that allows concurrency, to run based on a particular data_set, as derived from the recipe corresponding to the provided event. An event must be provided!"""
+ req = log_status.StartConcurrentLogRequest()
+ req.keep_alive.CopyFrom(bosdyn.util.seconds_to_duration(duration_seconds))
+
+ if event:
+ req.event.CopyFrom(event)
+
+ return self.call_async(self._stub.StartConcurrentLog, req,
+ error_from_response=start_concurrent_log_error, copy_request=False,
+ **kwargs)
+
  def update_experiment(self, id, seconds, **kwargs):
  """Update an experiment log to run for a specified duration.

@@ -217,6 +250,18 @@ _START_RETRO_LOG_STATUS_TO_ERROR.update({
  error_pair(ConcurrencyLimitReachedError),
  })

+ _START_CONCURRENT_LOG_STATUS_TO_ERROR = \
+ collections.defaultdict(lambda: (LogStatusResponseError, None))
+ _START_CONCURRENT_LOG_STATUS_TO_ERROR.update({
+ log_status.StartConcurrentLogResponse.STATUS_OK: (None, None),
+ log_status.StartConcurrentLogResponse.STATUS_EXPERIMENT_LOG_RUNNING:
+ error_pair(ExperimentAlreadyRunningError),
+ log_status.StartConcurrentLogResponse.STATUS_CONCURRENCY_LIMIT_REACHED:
+ error_pair(ConcurrencyLimitReachedError),
+ log_status.StartConcurrentLogResponse.STATUS_NO_DATA_FOR_EVENT:
+ error_pair(NoDataForEventError),
+ })
+
  _UPDATE_EXPERIMENT_LOG_STATUS_TO_ERROR = \
  collections.defaultdict(lambda: (LogStatusResponseError, None))
  _UPDATE_EXPERIMENT_LOG_STATUS_TO_ERROR.update({
@@ -272,6 +317,15 @@ def start_retro_log_error(response):
  status_to_error=_START_RETRO_LOG_STATUS_TO_ERROR)


+ @handle_common_header_errors
+ @handle_unset_status_error(unset='STATUS_UNKNOWN')
+ def start_concurrent_log_error(response):
+ """Return a custom exception based on the StartConcurrentLog response, None if no error."""
+ return error_factory(response, response.status,
+ status_to_string=log_status.StartConcurrentLogResponse.Status.Name,
+ status_to_error=_START_CONCURRENT_LOG_STATUS_TO_ERROR)
+
+
  @handle_common_header_errors
  @handle_unset_status_error(unset='STATUS_UNKNOWN')
  def update_experiment_log_error(response):
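
A hedged sketch of the two client-facing additions: the past_textlog_duration argument on start_experiment_log() and the new start_concurrent_log() RPC. The calls are shown independently; per the error table above, a running experiment log makes start_concurrent_log() raise ExperimentAlreadyRunningError. The event proto is a placeholder built elsewhere, and one must be provided so the service can resolve the corresponding recipe.

from bosdyn.client.log_status import LogStatusClient

log_client = robot.ensure_client(LogStatusClient.default_service_name)

# Run an experiment log for 10 minutes and also pull in the last 60 s of text logs.
log_client.start_experiment_log(seconds=600, past_textlog_duration=60)

# Alternatively, start a concurrency-friendly log tied to an event (placeholder proto).
log_client.start_concurrent_log(duration_seconds=600, event=my_event)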
bosdyn/client/map_processing.py CHANGED
@@ -6,11 +6,9 @@

  """For clients of the graph_nav map processing service."""

- from bosdyn.api.graph_nav import map_pb2, map_processing_pb2, map_processing_service_pb2
+ from bosdyn.api.graph_nav import map_processing_pb2
  from bosdyn.api.graph_nav import map_processing_service_pb2_grpc as map_processing
- from bosdyn.client.common import (BaseClient, common_header_errors, error_factory,
- handle_common_header_errors, handle_lease_use_result_errors,
- handle_unset_status_error)
+ from bosdyn.client.common import BaseClient, handle_common_header_errors, handle_unset_status_error
  from bosdyn.client.exceptions import ResponseError
