standardbots 2.20250128.44-py3-none-any.whl → 2.20250417.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of standardbots has been flagged for review.
- standardbots/auto_generated/apis.py +927 -65
- standardbots/auto_generated/models.py +1569 -75
- {standardbots-2.20250128.44.dist-info → standardbots-2.20250417.2.dist-info}/METADATA +1 -1
- standardbots-2.20250417.2.dist-info/RECORD +12 -0
- {standardbots-2.20250128.44.dist-info → standardbots-2.20250417.2.dist-info}/WHEEL +1 -1
- tests/fixtures/client_fixt.py +0 -2
- tests/fixtures/robot_fixt.py +49 -0
- tests/fixtures/routines_fixt.py +23 -0
- standardbots-2.20250128.44.dist-info/RECORD +0 -11
- {standardbots-2.20250128.44.dist-info → standardbots-2.20250417.2.dist-info}/top_level.txt +0 -0
standardbots/auto_generated/apis.py

@@ -85,11 +85,11 @@ class Default:
             self,
         ) -> Response[
             Union[
-                models.
+                models.ActiveCalibrationContainer,
                 models.ErrorResponse,
                 None
             ],
-            models.
+            models.ActiveCalibrationContainer
         ]:
             """
             Get the active calibration for the robot.
@@ -104,7 +104,47 @@ class Default:
                 )
                 parsed = None
                 if response.status == 200:
-                    parsed = models.
+                    parsed = models.parse_active_calibration_container(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def set_active_calibration(
+            self,
+            body: models.ActiveCalibrationContainer,
+        ) -> Response[
+            None,
+            None
+        ]:
+            """
+            Set the active calibration for the robot.
+
+            """
+            path = "/api/v1/calibration/active"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_active_calibration_container(body)),
+                )
+                parsed = None
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
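The pattern above repeats throughout this release: each endpoint returns a Response wrapping either the parsed success model or a models.ErrorResponse (4xx/503, or a synthetic 503 when the connection is refused). A minimal consumption sketch, assuming the generated Default group is reachable as `api`, that Response exposes its constructor arguments as `.data` and `.status`, and that ErrorResponse fields mirror its constructor kwargs (none of these names are shown in this diff):

    from standardbots.auto_generated import models

    res = api.calibration.get_active_calibration()        # getter shown in the first hunk
    if isinstance(res.data, models.ActiveCalibrationContainer):
        # 200 OK: round-trip the payload through the new setter added above.
        api.calibration.set_active_calibration(res.data)   # POST /api/v1/calibration/active
    elif isinstance(res.data, models.ErrorResponse):
        print(f"calibration lookup failed ({res.status}): {res.data.message}")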
@@ -132,7 +172,7 @@ class Default:
         def onrobot_2fg7_move(
             self,
             value: Union[int, float],
-            direction: Union[str, models.LinearGripDirectionEnum] = models.LinearGripDirectionEnum.
+            direction: Union[str, models.LinearGripDirectionEnum] = models.LinearGripDirectionEnum.Internal,
             unit_kind: Union[str, models.LinearUnitKind] = models.LinearUnitKind.Millimeters
         ):
             """Move the robot to the onrobot_2fg7 position.
@@ -154,7 +194,7 @@ class Default:
         def onrobot_2fg7_grip(
             self,
             value: Union[int, float],
-            direction: Union[str, models.LinearGripDirectionEnum] = models.LinearGripDirectionEnum.
+            direction: Union[str, models.LinearGripDirectionEnum] = models.LinearGripDirectionEnum.Internal,
             unit_kind: Union[str, models.LinearUnitKind] = models.LinearUnitKind.Millimeters,
             force: Union[int, float] = 0.0,
             force_unit: Union[str, models.ForceUnitKind] = models.ForceUnitKind.Newtons
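The only change in the two hunks above is the default grip direction, which is now models.LinearGripDirectionEnum.Internal. A hedged sketch of what that means for callers; `gripper` stands for whichever generated API group exposes these helpers (the hunk header only shows class Default):

    # Both calls grip to 30 mm; the first relies on the new defaults (internal direction,
    # millimeters, 0.0 force in newtons), the second spells everything out.
    gripper.onrobot_2fg7_grip(30)
    gripper.onrobot_2fg7_grip(
        30,
        direction=models.LinearGripDirectionEnum.Internal,
        unit_kind=models.LinearUnitKind.Millimeters,
        force=20.0,
        force_unit=models.ForceUnitKind.Newtons,
    )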
@@ -240,30 +280,637 @@ class Default:
                     dh_cgi=models.DHCGIGripperCommandRequest(
                         target_diameter, target_force, target_speed
                     ),
-                ),
-            )
-
-
-        def control_gripper(
+                ),
+            )
+
+
+        def control_gripper(
+            self,
+            body: models.GripperCommandRequest,
+        ) -> Response[
+            None,
+            None
+        ]:
+            """
+            Send commands to control the Gripper (End Effector) of the robot. The gripper can be any of Standard Bots supported grippers.
+
+            """
+            path = "/api/v1/equipment/end-effector/control"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_gripper_command_request(body)),
+                )
+                parsed = None
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def get_gripper_configuration(
+            self,
+        ) -> Response[
+            Union[
+                models.GripperConfiguration,
+                models.ErrorResponse,
+                None
+            ],
+            models.GripperConfiguration
+        ]:
+            """
+            Get the current gripper configuration
+
+            """
+            path = "/api/v1/equipment/end-effector/configuration"
+            try:
+                response = self._request_manager.request(
+                    "GET",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_gripper_configuration(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+    class Payload:
+        def __init__(self, request_manager: RequestManager):
+            self._request_manager = request_manager
+
+
+        def set_payload(
+            self,
+            body: models.PayloadStateRequest,
+        ) -> Response[
+            Union[
+                models.ErrorResponse,
+                models.ErrorResponse,
+                None
+            ],
+            None
+        ]:
+            """
+            Set a value for the mass being carried by the robot's end-effector.
+
+            """
+            path = "/api/v1/payload"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_payload_state_request(body)),
+                )
+                parsed = None
+                if response.status == 400:
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def get_payload(
+            self,
+        ) -> Response[
+            Union[
+                models.PayloadStateResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.PayloadStateResponse
+        ]:
+            """
+            Get the current mass value being carried by the robot's end-effector.
+
+            """
+            path = "/api/v1/payload"
+            try:
+                response = self._request_manager.request(
+                    "GET",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_payload_state_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+    class Recorder:
+        def __init__(self, request_manager: RequestManager):
+            self._request_manager = request_manager
+
+
+        def get_recorder_state(
+            self,
+        ) -> Response[
+            Union[
+                models.RecorderState,
+                models.ErrorResponse,
+                None
+            ],
+            models.RecorderState
+        ]:
+            """
+            Get the state of the recorder
+            """
+            path = "/api/v1/recorder/state"
+            try:
+                response = self._request_manager.request(
+                    "GET",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_recorder_state(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def update_recording(
+            self,
+            body: models.RecorderConfig,
+        ) -> Response[
+            Union[
+                models.UpdateRecordingResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.UpdateRecordingResponse
+        ]:
+            """
+            Update recording configuration
+            """
+            path = "/api/v1/recorder/update"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_recorder_config(body)),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_update_recording_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def enable_recorder_bot(
+            self,
+            body: models.ToggleRecorderBotRequest,
+        ) -> Response[
+            Union[
+                models.UpdateRecordingResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.UpdateRecordingResponse
+        ]:
+            """
+            Enable or disable a secondary bot
+            """
+            path = "/api/v1/recorder/set-bot-enabled"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_toggle_recorder_bot_request(body)),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_update_recording_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def start_recording(
+            self,
+            body: models.StartRecordingRequest,
+        ) -> Response[
+            Union[
+                models.StartRecordingResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.StartRecordingResponse
+        ]:
+            """
+            Start recording movement and camera data
+            """
+            path = "/api/v1/recorder/start"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_start_recording_request(body)),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_start_recording_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def stop_recording(
+            self,
+            body: models.StopRecordingRequest,
+        ) -> Response[
+            Union[
+                models.StopRecordingResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.StopRecordingResponse
+        ]:
+            """
+            Stop recording movement and camera data
+            """
+            path = "/api/v1/recorder/stop"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_stop_recording_request(body)),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_stop_recording_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def save_recording(
+            self,
+            body: models.SaveRecordingRequest,
+        ) -> Response[
+            Union[
+                models.SaveRecordingResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.SaveRecordingResponse
+        ]:
+            """
+            Save recording to marvin app
+            """
+            path = "/api/v1/recorder/save"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_save_recording_request(body)),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_save_recording_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+    class Sensors:
+        def __init__(self, request_manager: RequestManager):
+            self._request_manager = request_manager
+
+
+        def get_sensors(
+            self,
+        ) -> Response[
+            Union[
+                models.SensorsConfiguration,
+                models.ErrorResponse,
+                None
+            ],
+            models.SensorsConfiguration
+        ]:
+            """
+            Get the current state of all sensors
+            """
+            path = "/api/v1/equipment/custom/sensors"
+            try:
+                response = self._request_manager.request(
+                    "GET",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_sensors_configuration(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+    class Space:
+        def __init__(self, request_manager: RequestManager):
+            self._request_manager = request_manager
+
+
+        def list_planes(
+            self,
+            limit: int,
+            offset: int,
+        ) -> Response[
+            Union[
+                models.PlanesPaginatedResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.PlanesPaginatedResponse
+        ]:
+            """
+            List Planes
+            """
+            path = "/api/v1/space/planes"
+            try:
+                response = self._request_manager.request(
+                    "GET",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    fields={
+                        "limit": models.serialize_i_64(limit),
+                        "offset": models.serialize_i_64(offset),
+                    }
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_planes_paginated_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+    class Teleop:
+        def __init__(self, request_manager: RequestManager):
+            self._request_manager = request_manager
+
+
+        def get_state(
+            self,
+        ) -> Response[
+            Union[
+                models.TeleopState,
+                models.ErrorResponse,
+                None
+            ],
+            models.TeleopState
+        ]:
+            """
+            Get the state of the teleop
+            """
+            path = "/api/v1/teleop/state"
+            try:
+                response = self._request_manager.request(
+                    "GET",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_teleop_state(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def enable_bot(
             self,
-            body: models.
+            body: models.ToggleTeleopBotRequest,
         ) -> Response[
-
-
+            Union[
+                models.TeleopState,
+                models.ErrorResponse,
+                None
+            ],
+            models.TeleopState
         ]:
             """
-
-
+            Enable or disable a secondary bot
             """
-            path = "/api/v1/
+            path = "/api/v1/teleop/set-bot-enabled"
             try:
                 response = self._request_manager.request(
                     "POST",
                     path,
                     headers=self._request_manager.json_headers(),
-                    body=json.dumps(models.
+                    body=json.dumps(models.serialize_toggle_teleop_bot_request(body)),
                 )
                 parsed = None
+                if response.status == 200:
+                    parsed = models.parse_teleop_state(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
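Most of this hunk is the new Recorder, Payload, Sensors, Space and Teleop groups. As a rough sketch of the recorder lifecycle implied by the method names and paths above (an `api` object for the generated Default group is assumed, and the request models are constructed empty here because their fields live in models.py, outside this diff):

    state = api.recorder.get_recorder_state()                        # GET  /api/v1/recorder/state
    api.recorder.start_recording(models.StartRecordingRequest())     # POST /api/v1/recorder/start
    # ... perform the motions to capture ...
    api.recorder.stop_recording(models.StopRecordingRequest())       # POST /api/v1/recorder/stop
    api.recorder.save_recording(models.SaveRecordingRequest())       # POST /api/v1/recorder/save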
@@ -284,30 +931,31 @@ class Default:
                     503,
                     None
                 )
-        def
+        def set_teleop_config(
             self,
+            body: models.TeleopConfig,
         ) -> Response[
             Union[
-                models.
+                models.TeleopState,
                 models.ErrorResponse,
                 None
             ],
-            models.
+            models.TeleopState
         ]:
             """
-
-
+            Set teleop config parameters
             """
-            path = "/api/v1/
+            path = "/api/v1/teleop/set-config"
             try:
                 response = self._request_manager.request(
-                    "
+                    "POST",
                     path,
                     headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_teleop_config(body)),
                 )
                 parsed = None
                 if response.status == 200:
-                    parsed = models.
+                    parsed = models.parse_teleop_state(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
@@ -328,34 +976,29 @@ class Default:
                     503,
                     None
                 )
-
-        def __init__(self, request_manager: RequestManager):
-            self._request_manager = request_manager
-
-
-        def get_sensors(
+        def start_teleop(
             self,
         ) -> Response[
             Union[
-                models.
+                models.StartTeleopResponse,
                 models.ErrorResponse,
                 None
             ],
-            models.
+            models.StartTeleopResponse
         ]:
             """
-
+            Start teleoperation
             """
-            path = "/api/v1/
+            path = "/api/v1/teleop/start"
             try:
                 response = self._request_manager.request(
-                    "
+                    "POST",
                     path,
                     headers=self._request_manager.json_headers(),
                 )
                 parsed = None
                 if response.status == 200:
-                    parsed = models.
+                    parsed = models.parse_start_teleop_response(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
@@ -376,40 +1019,74 @@ class Default:
                     503,
                     None
                 )
-
-
-
+        def stop_teleop(
+            self,
+        ) -> Response[
+            Union[
+                models.StopTeleopResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.StopTeleopResponse
+        ]:
+            """
+            Stop recording movement and camera data
+            """
+            path = "/api/v1/teleop/stop"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_stop_teleop_response(json.loads(response.data))
 
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
 
-
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def set_ratio_control(
             self,
-
-            offset: int,
+            body: models.SetRatioControlRequest,
         ) -> Response[
             Union[
-                models.
+                models.TeleopState,
                 models.ErrorResponse,
                 None
             ],
-            models.
+            models.TeleopState
         ]:
             """
-
+            Set ratio control parameters
             """
-            path = "/api/v1/
+            path = "/api/v1/teleop/set-ratio-control"
             try:
                 response = self._request_manager.request(
-                    "
+                    "POST",
                     path,
                     headers=self._request_manager.json_headers(),
-
-                        "limit": models.serialize_i_64(limit),
-                        "offset": models.serialize_i_64(offset),
-                    }
+                    body=json.dumps(models.serialize_set_ratio_control_request(body)),
                 )
                 parsed = None
                 if response.status == 200:
-                    parsed = models.
+                    parsed = models.parse_teleop_state(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
@@ -433,15 +1110,21 @@ class Default:
 
     calibration: Calibration
     equipment: Equipment
+    payload: Payload
+    recorder: Recorder
     sensors: Sensors
     space: Space
+    teleop: Teleop
 
     def __init__(self, request_manager: RequestManager):
         self._request_manager = request_manager
         self.calibration = Default.Calibration(request_manager)
         self.equipment = Default.Equipment(request_manager)
+        self.payload = Default.Payload(request_manager)
+        self.recorder = Default.Recorder(request_manager)
         self.sensors = Default.Sensors(request_manager)
         self.space = Default.Space(request_manager)
+        self.teleop = Default.Teleop(request_manager)
 
 class Movement:
     _request_manager: RequestManager
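With the wiring above, the new groups hang directly off the generated Default object. A minimal sketch, assuming an existing RequestManager instance and the Response field names mentioned earlier:

    api = Default(request_manager)
    mass = api.payload.get_payload()               # GET /api/v1/payload
    rec = api.recorder.get_recorder_state()        # GET /api/v1/recorder/state
    tel = api.teleop.get_state()                   # GET /api/v1/teleop/state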
@@ -476,6 +1159,7 @@ class Movement:
             Union[
                 models.BrakesState,
                 models.ErrorResponse,
+                models.ErrorResponse,
                 None
             ],
             models.BrakesState
@@ -495,6 +1179,8 @@ class Movement:
                 parsed = None
                 if response.status == 200:
                     parsed = models.parse_brakes_state(json.loads(response.data))
+                if response.status == 500:
+                    parsed = models.parse_error_response(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
@@ -740,14 +1426,113 @@ class Movement:
                     503,
                     None
                 )
+    class PositionControlled:
+        def __init__(self, request_manager: RequestManager):
+            self._request_manager = request_manager
+
+
+        def set_arm_position_controlled(
+            self,
+            body: models.ArmPositionUpdateControlledRequest,
+        ) -> Response[
+            Union[
+                models.SetArmPositionControlledResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.SetArmPositionControlledResponse
+        ]:
+            """
+            Control the position of the RO1 Robot arm in a controlled manner. This endpoint will return once the movement is initiated. The client should then call the heartbeat endpoint every 300ms to continue the movement and get its status. The robot should be idle before calling this endpoint.
+
+            """
+            path = "/api/v1/movement/position/arm/controlled"
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                    body=json.dumps(models.serialize_arm_position_update_controlled_request(body)),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_set_arm_position_controlled_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
+        def send_heartbeat(
+            self,
+            command_id: str,
+        ) -> Response[
+            Union[
+                models.SetArmPositionControlledResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.SetArmPositionControlledResponse
+        ]:
+            """
+            Use this endpoint to send a heartbeat for the current movement. This assumes that the client has already called the "Set Arm Position With Heartbeat" endpoint and that the movement is in progress. This heartbeat should be sent every 300ms. The robot should be executing a movement when calling this endpoint.
+
+            """
+            path = "/api/v1/movement/position/arm/controlled/{command_id}/heartbeat"
+            path = path.replace("{command_id}", str(command_id))
+            try:
+                response = self._request_manager.request(
+                    "POST",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_set_arm_position_controlled_response(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
 
     brakes: Brakes
     position: Position
+    position_controlled: PositionControlled
 
     def __init__(self, request_manager: RequestManager):
         self._request_manager = request_manager
         self.brakes = Movement.Brakes(request_manager)
         self.position = Movement.Position(request_manager)
+        self.position_controlled = Movement.PositionControlled(request_manager)
 
 class Camera:
     _request_manager: RequestManager
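The docstrings above describe a two-step protocol: start a controlled move, then keep it alive with a heartbeat every 300 ms. A hedged sketch of that loop; the attribute carrying the command id on SetArmPositionControlledResponse and the completion flag are assumptions, so check models.py for the real field names:

    import time

    movement = Movement(request_manager)
    res = movement.position_controlled.set_arm_position_controlled(
        models.ArmPositionUpdateControlledRequest()      # fields omitted; defined in models.py
    )
    command_id = res.data.command_id                     # assumed attribute name
    while getattr(res.data, "in_progress", False):       # assumed completion flag
        time.sleep(0.3)                                  # heartbeat every 300 ms per the docstring
        res = movement.position_controlled.send_heartbeat(command_id)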
@@ -764,7 +1549,7 @@ class Camera:
             None
         ]:
             """
-            Retrieve the latest RGB frame from the camera. In JPEG format.
+            Retrieve the latest RGB frame from the camera as base64 string. In JPEG format.
             """
             path = "/api/v1/camera/frame/rgb"
             try:
@@ -798,8 +1583,12 @@ class Camera:
         def get_camera_intrinsics_color(
             self,
         ) -> Response[
-
-
+            Union[
+                models.CameraIntrinsics,
+                models.ErrorResponse,
+                None
+            ],
+            models.CameraIntrinsics
         ]:
             """
             Retrieve the intrinsic parameters for the color camera.
@@ -812,6 +1601,8 @@ class Camera:
                     headers=self._request_manager.json_headers(),
                 )
                 parsed = None
+                if response.status == 200:
+                    parsed = models.parse_camera_intrinsics(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
@@ -839,7 +1630,7 @@ class Camera:
             None
         ]:
             """
-            Retrieve the latest RGB frame from the camera.
+            Retrieve the latest RGB frame from the camera as base64 string. In JPEG format.
             """
             path = "/api/v1/camera/stream/rgb"
             try:
@@ -914,14 +1705,64 @@ class Camera:
                     503,
                     None
                 )
+    class Status:
+        def __init__(self, request_manager: RequestManager):
+            self._request_manager = request_manager
+
+
+        def get_camera_status(
+            self,
+        ) -> Response[
+            Union[
+                models.CameraStatus,
+                models.ErrorResponse,
+                None
+            ],
+            models.CameraStatus
+        ]:
+            """
+            Retrieve the current status of the camera.
+            """
+            path = "/api/v1/camera/status"
+            try:
+                response = self._request_manager.request(
+                    "GET",
+                    path,
+                    headers=self._request_manager.json_headers(),
+                )
+                parsed = None
+                if response.status == 200:
+                    parsed = models.parse_camera_status(json.loads(response.data))
+
+                is_user_error = response.status >= 400 and response.status <= 500
+                is_unavailable = response.status == 503
+                if parsed is None and (is_user_error or is_unavailable):
+                    parsed = models.parse_error_response(json.loads(response.data))
+
+                return Response(
+                    parsed,
+                    response.status,
+                    response
+                )
+            except urllib3.exceptions.MaxRetryError:
+                return Response(
+                    models.ErrorResponse(
+                        error=models.ErrorEnum.InternalServerError,
+                        message="Connection Refused"
+                    ),
+                    503,
+                    None
+                )
 
     data: Data
     settings: Settings
+    status: Status
 
     def __init__(self, request_manager: RequestManager):
         self._request_manager = request_manager
         self.data = Camera.Data(request_manager)
         self.settings = Camera.Settings(request_manager)
+        self.status = Camera.Status(request_manager)
 
 class Faults:
     _request_manager: RequestManager
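A short sketch of the new status group (attribute wiring as added above; Response field names assumed as before):

    camera = Camera(request_manager)
    status = camera.status.get_camera_status()     # GET /api/v1/camera/status
    if isinstance(status.data, models.CameraStatus):
        print(status.data)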
@@ -1978,8 +2819,13 @@ class RoutineEditor:
             body: models.PlayRoutineRequest,
             routine_id: str,
         ) -> Response[
-
-
+            Union[
+                models.PlayRoutineResponse,
+                models.ErrorResponse,
+                models.ErrorResponse,
+                None
+            ],
+            models.PlayRoutineResponse
         ]:
             """
             Play a routine
@@ -1994,6 +2840,10 @@ class RoutineEditor:
                     body=json.dumps(models.serialize_play_routine_request(body)),
                 )
                 parsed = None
+                if response.status == 200:
+                    parsed = models.parse_play_routine_response(json.loads(response.data))
+                if response.status == 400:
+                    parsed = models.parse_error_response(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
@@ -2018,11 +2868,15 @@ class RoutineEditor:
             self,
             routine_id: str,
         ) -> Response[
-
+            Union[
+                models.ErrorResponse,
+                models.ErrorResponse,
+                None
+            ],
             None
         ]:
             """
-            Pause a routine
+            Pause a routine. Routine must be running.
             """
             path = "/api/v1/routine-editor/routines/{routine_id}/pause"
             path = path.replace("{routine_id}", str(routine_id))
@@ -2033,6 +2887,8 @@ class RoutineEditor:
                     headers=self._request_manager.json_headers(),
                 )
                 parsed = None
+                if response.status == 400:
+                    parsed = models.parse_error_response(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
@@ -2056,11 +2912,15 @@ class RoutineEditor:
         def stop(
             self,
         ) -> Response[
-
+            Union[
+                models.ErrorResponse,
+                models.ErrorResponse,
+                None
+            ],
             None
         ]:
             """
-            Stop running routine and all ongoing motions
+            Stop running routine and all ongoing motions. Routine must be running.
             """
             path = "/api/v1/routine-editor/stop"
             try:
@@ -2070,6 +2930,8 @@ class RoutineEditor:
                     headers=self._request_manager.json_headers(),
                 )
                 parsed = None
+                if response.status == 400:
+                    parsed = models.parse_error_response(json.loads(response.data))
 
                 is_user_error = response.status >= 400 and response.status <= 500
                 is_unavailable = response.status == 503
@@ -2344,7 +3206,7 @@ class RoutineEditor:
             models.RuntimeVariable
         ]:
             """
-            Returns current state of a variable
+            Returns current state of a variable. Routine must be running.
             """
             path = "/api/v1/routine-editor/variables/{variable_name}"
             path = path.replace("{variable_name}", str(variable_name))
@@ -2390,7 +3252,7 @@ class RoutineEditor:
             models.RuntimeVariable
         ]:
             """
-            Update the value of a variable
+            Update the value of a variable. Routine must be running.
             """
             path = "/api/v1/routine-editor/variables/{variable_name}"
             path = path.replace("{variable_name}", str(variable_name))
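The routine-editor hunks mostly widen the declared result unions and note that these endpoints require a running routine; a 400 body is now parsed into models.ErrorResponse, so callers can surface it. A minimal sketch (`editor` is an already-built RoutineEditor API object; Response field names assumed as before):

    res = editor.stop()                              # POST /api/v1/routine-editor/stop
    if res.status == 400 and isinstance(res.data, models.ErrorResponse):
        print(f"stop rejected: {res.data.message}")  # e.g. no routine was running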